Dec 01 18:29:44 crc systemd[1]: Starting Kubernetes Kubelet... Dec 01 18:29:44 crc restorecon[4697]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:44 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc 
restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 18:29:45 crc 
restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc 
restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc 
restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 
crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 
18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
[... further identical restorecon[4697] entries at Dec 01 18:29:45, one per file under the same ca-trust-extracted/pem/directory-hash directory of pod 8f668bae-612b-4b75-9490-919e737c6a3b: OpenSSL hash links (e.g. 69105f4f.0, 0b9bc432.0, 9bf03295.0) and CA certificate PEMs from the extracted trust bundle (e.g. GlobalSign_Root_CA.pem, DigiCert_Global_Root_G2.pem, Entrust_Root_Certification_Authority_-_G2.pem, ISRG_Root_X1.pem, QuoVadis_Root_CA_2.pem, Starfield_Root_Certificate_Authority_-_G2.pem, TWCA_Global_Root_CA.pem, ca-certificates.crt), each reported as "not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16" ...]
Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 
18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 18:29:45 crc 
restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 
18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 
18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc 
restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 18:29:45 crc restorecon[4697]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 01 18:29:46 crc kubenswrapper[4935]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 01 18:29:46 crc kubenswrapper[4935]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 01 18:29:46 crc kubenswrapper[4935]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 01 18:29:46 crc kubenswrapper[4935]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 01 18:29:46 crc kubenswrapper[4935]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 01 18:29:46 crc kubenswrapper[4935]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.326820 4935 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330518 4935 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330543 4935 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330551 4935 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330558 4935 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330565 4935 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330572 4935 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330584 4935 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330590 4935 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330726 4935 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330736 4935 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330743 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330751 4935 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330759 4935 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330766 4935 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330776 4935 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330785 4935 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330792 4935 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330799 4935 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330813 4935 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330820 4935 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330893 4935 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330901 4935 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330909 4935 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330916 4935 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330923 4935 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330929 4935 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330936 4935 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330942 4935 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330949 4935 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330955 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330962 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330975 4935 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330981 4935 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330988 4935 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.330995 4935 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331002 4935 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331008 4935 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331019 4935 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331029 4935 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331037 4935 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331044 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331051 4935 feature_gate.go:330] unrecognized feature gate: Example Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331058 4935 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331071 4935 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331082 4935 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331089 4935 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331095 4935 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331102 4935 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331108 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331115 4935 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331121 4935 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331130 4935 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331139 4935 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331171 4935 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331180 4935 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331194 4935 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331202 4935 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331209 4935 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331216 4935 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331224 4935 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331230 4935 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331237 4935 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331243 4935 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331252 4935 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331289 4935 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331297 4935 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331312 4935 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331318 4935 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331325 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331332 4935 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.331338 4935 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332260 4935 flags.go:64] FLAG: --address="0.0.0.0" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332667 4935 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332691 4935 flags.go:64] FLAG: --anonymous-auth="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332703 4935 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332716 4935 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332726 4935 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332738 4935 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332749 4935 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332758 4935 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332766 4935 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332776 4935 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332785 4935 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332793 4935 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332801 4935 flags.go:64] FLAG: --cgroup-root="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332809 4935 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332817 4935 flags.go:64] FLAG: --client-ca-file="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332824 4935 flags.go:64] FLAG: --cloud-config="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332832 4935 flags.go:64] FLAG: --cloud-provider="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332840 4935 flags.go:64] FLAG: --cluster-dns="[]" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332854 4935 flags.go:64] FLAG: --cluster-domain="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332862 4935 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332871 4935 flags.go:64] FLAG: --config-dir="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332879 4935 flags.go:64] FLAG: 
--container-hints="/etc/cadvisor/container_hints.json" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332888 4935 flags.go:64] FLAG: --container-log-max-files="5" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332899 4935 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332907 4935 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332916 4935 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332925 4935 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332933 4935 flags.go:64] FLAG: --contention-profiling="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332941 4935 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332949 4935 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332968 4935 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.332985 4935 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333011 4935 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333020 4935 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333027 4935 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333036 4935 flags.go:64] FLAG: --enable-load-reader="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333045 4935 flags.go:64] FLAG: --enable-server="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333053 4935 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333068 4935 flags.go:64] FLAG: --event-burst="100" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333076 4935 flags.go:64] FLAG: --event-qps="50" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333085 4935 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333093 4935 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333101 4935 flags.go:64] FLAG: --eviction-hard="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333112 4935 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333120 4935 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333128 4935 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333137 4935 flags.go:64] FLAG: --eviction-soft="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333170 4935 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333179 4935 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333188 4935 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333196 4935 flags.go:64] FLAG: --experimental-mounter-path="" Dec 01 18:29:46 crc 
kubenswrapper[4935]: I1201 18:29:46.333205 4935 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333213 4935 flags.go:64] FLAG: --fail-swap-on="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333221 4935 flags.go:64] FLAG: --feature-gates="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333232 4935 flags.go:64] FLAG: --file-check-frequency="20s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333241 4935 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333250 4935 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333259 4935 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333267 4935 flags.go:64] FLAG: --healthz-port="10248" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333275 4935 flags.go:64] FLAG: --help="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333283 4935 flags.go:64] FLAG: --hostname-override="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333291 4935 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333299 4935 flags.go:64] FLAG: --http-check-frequency="20s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333308 4935 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333316 4935 flags.go:64] FLAG: --image-credential-provider-config="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333324 4935 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333333 4935 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333340 4935 flags.go:64] FLAG: --image-service-endpoint="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333348 4935 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333355 4935 flags.go:64] FLAG: --kube-api-burst="100" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333364 4935 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333372 4935 flags.go:64] FLAG: --kube-api-qps="50" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333381 4935 flags.go:64] FLAG: --kube-reserved="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333389 4935 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333398 4935 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333406 4935 flags.go:64] FLAG: --kubelet-cgroups="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333414 4935 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333422 4935 flags.go:64] FLAG: --lock-file="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333430 4935 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333438 4935 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333446 4935 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333462 4935 flags.go:64] 
FLAG: --log-json-split-stream="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333470 4935 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333478 4935 flags.go:64] FLAG: --log-text-split-stream="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333486 4935 flags.go:64] FLAG: --logging-format="text" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333494 4935 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333502 4935 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333510 4935 flags.go:64] FLAG: --manifest-url="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333518 4935 flags.go:64] FLAG: --manifest-url-header="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333531 4935 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333539 4935 flags.go:64] FLAG: --max-open-files="1000000" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333549 4935 flags.go:64] FLAG: --max-pods="110" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333558 4935 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333566 4935 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333574 4935 flags.go:64] FLAG: --memory-manager-policy="None" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333582 4935 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333591 4935 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333599 4935 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333607 4935 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333629 4935 flags.go:64] FLAG: --node-status-max-images="50" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333636 4935 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333644 4935 flags.go:64] FLAG: --oom-score-adj="-999" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333653 4935 flags.go:64] FLAG: --pod-cidr="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333661 4935 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333678 4935 flags.go:64] FLAG: --pod-manifest-path="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333687 4935 flags.go:64] FLAG: --pod-max-pids="-1" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333696 4935 flags.go:64] FLAG: --pods-per-core="0" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333703 4935 flags.go:64] FLAG: --port="10250" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333711 4935 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333720 4935 flags.go:64] FLAG: --provider-id="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333727 4935 
flags.go:64] FLAG: --qos-reserved="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333735 4935 flags.go:64] FLAG: --read-only-port="10255" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333745 4935 flags.go:64] FLAG: --register-node="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333753 4935 flags.go:64] FLAG: --register-schedulable="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333761 4935 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333788 4935 flags.go:64] FLAG: --registry-burst="10" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333796 4935 flags.go:64] FLAG: --registry-qps="5" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333803 4935 flags.go:64] FLAG: --reserved-cpus="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333812 4935 flags.go:64] FLAG: --reserved-memory="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333823 4935 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333831 4935 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333840 4935 flags.go:64] FLAG: --rotate-certificates="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333848 4935 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333856 4935 flags.go:64] FLAG: --runonce="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333864 4935 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333872 4935 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333881 4935 flags.go:64] FLAG: --seccomp-default="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333888 4935 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333897 4935 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333906 4935 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333914 4935 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333922 4935 flags.go:64] FLAG: --storage-driver-password="root" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333930 4935 flags.go:64] FLAG: --storage-driver-secure="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333938 4935 flags.go:64] FLAG: --storage-driver-table="stats" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333947 4935 flags.go:64] FLAG: --storage-driver-user="root" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333954 4935 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333963 4935 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333971 4935 flags.go:64] FLAG: --system-cgroups="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333979 4935 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.333993 4935 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334001 
4935 flags.go:64] FLAG: --tls-cert-file="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334010 4935 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334020 4935 flags.go:64] FLAG: --tls-min-version="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334028 4935 flags.go:64] FLAG: --tls-private-key-file="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334036 4935 flags.go:64] FLAG: --topology-manager-policy="none" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334044 4935 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334053 4935 flags.go:64] FLAG: --topology-manager-scope="container" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334061 4935 flags.go:64] FLAG: --v="2" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334072 4935 flags.go:64] FLAG: --version="false" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334084 4935 flags.go:64] FLAG: --vmodule="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334094 4935 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.334103 4935 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334390 4935 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334406 4935 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334416 4935 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334424 4935 feature_gate.go:330] unrecognized feature gate: Example Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334433 4935 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334441 4935 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334449 4935 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334457 4935 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334466 4935 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334474 4935 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334480 4935 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334488 4935 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334495 4935 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334502 4935 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334509 4935 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334516 4935 feature_gate.go:330] unrecognized feature 
gate: MachineAPIMigration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334523 4935 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334530 4935 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334537 4935 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334543 4935 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334550 4935 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334557 4935 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334564 4935 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334571 4935 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334578 4935 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334585 4935 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334593 4935 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334600 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334607 4935 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334614 4935 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334621 4935 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334628 4935 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334634 4935 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334641 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334652 4935 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334661 4935 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334668 4935 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334676 4935 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334684 4935 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334693 4935 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334702 4935 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334710 4935 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334717 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334724 4935 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334734 4935 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334743 4935 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334751 4935 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334758 4935 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334764 4935 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334772 4935 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334779 4935 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334786 4935 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334793 4935 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334799 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334806 4935 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334814 4935 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334820 4935 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334828 4935 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334836 4935 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334842 4935 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334850 4935 
feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334857 4935 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334864 4935 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334870 4935 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334877 4935 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334884 4935 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334890 4935 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334898 4935 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334907 4935 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334915 4935 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.334922 4935 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.335180 4935 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.343484 4935 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.343514 4935 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343599 4935 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343611 4935 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343618 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343623 4935 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343629 4935 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343635 4935 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343641 4935 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343646 4935 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343651 4935 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343657 4935 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343662 4935 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343667 4935 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343673 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343678 4935 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343683 4935 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343688 4935 feature_gate.go:330] unrecognized feature gate: Example Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343693 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343699 4935 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343705 4935 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343710 4935 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343715 4935 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343720 4935 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343727 4935 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343734 4935 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343740 4935 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343746 4935 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343751 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343757 4935 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343763 4935 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343768 4935 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343773 4935 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343779 4935 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343784 4935 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343789 4935 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343797 4935 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343804 4935 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343810 4935 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343816 4935 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343823 4935 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343829 4935 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343835 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343840 4935 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343846 4935 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343852 4935 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343858 4935 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343863 4935 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343868 4935 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343874 4935 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343879 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343884 4935 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343889 4935 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343895 4935 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343900 4935 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343905 4935 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343910 4935 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343915 4935 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343922 4935 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343927 4935 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343932 4935 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343938 4935 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343943 4935 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343948 4935 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343953 4935 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343958 4935 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343964 4935 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 18:29:46 
crc kubenswrapper[4935]: W1201 18:29:46.343969 4935 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343974 4935 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343979 4935 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343984 4935 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343989 4935 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.343996 4935 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.344005 4935 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344181 4935 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344191 4935 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344199 4935 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344206 4935 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344212 4935 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344217 4935 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344222 4935 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344228 4935 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344233 4935 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344238 4935 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344243 4935 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344249 4935 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344254 4935 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344259 4935 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344264 4935 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344270 4935 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344275 4935 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344280 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344287 4935 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344294 4935 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344300 4935 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344305 4935 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344311 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344316 4935 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344321 4935 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344327 4935 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344332 4935 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344338 4935 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344344 4935 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344349 4935 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344356 4935 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344363 4935 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344369 4935 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344375 4935 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344381 4935 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344387 4935 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344393 4935 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344398 4935 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344404 4935 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344409 4935 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344414 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344419 4935 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344424 4935 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344430 4935 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344435 4935 
feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344440 4935 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344445 4935 feature_gate.go:330] unrecognized feature gate: Example Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344450 4935 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344455 4935 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344460 4935 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344466 4935 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344471 4935 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344476 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344481 4935 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344487 4935 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344492 4935 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344498 4935 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344503 4935 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344508 4935 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344514 4935 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344519 4935 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344524 4935 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344529 4935 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344534 4935 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344539 4935 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344544 4935 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344549 4935 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344555 4935 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344560 4935 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.344566 4935 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 18:29:46 crc 
kubenswrapper[4935]: W1201 18:29:46.344573 4935 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.344582 4935 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.344972 4935 server.go:940] "Client rotation is on, will bootstrap in background" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.348338 4935 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.348431 4935 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.349105 4935 server.go:997] "Starting client certificate rotation" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.349137 4935 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.349852 4935 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-19 21:14:40.029470975 +0000 UTC Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.350031 4935 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 434h44m53.679447207s for next certificate rotation Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.364770 4935 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.368501 4935 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.380945 4935 log.go:25] "Validated CRI v1 runtime API" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.407951 4935 log.go:25] "Validated CRI v1 image API" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.409686 4935 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.412806 4935 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-01-18-25-02-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.412834 4935 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs 
blockSize:0}] Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.429460 4935 manager.go:217] Machine: {Timestamp:2025-12-01 18:29:46.427896583 +0000 UTC m=+0.449525882 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:5ab99546-e564-412e-b1e6-598dab154fb0 BootID:9d6c30b1-43b8-4422-8964-f97321d04fb0 Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:b4:ad:e0 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:b4:ad:e0 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:89:2e:9e Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:da:b7:91 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:cf:ef:cb Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:98:d0:11 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:7e:10:57:c2:68:4e Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:52:49:82:ae:a4:47 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified 
Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.429815 4935 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.430048 4935 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.432536 4935 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.432952 4935 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.433017 4935 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" 
nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.433451 4935 topology_manager.go:138] "Creating topology manager with none policy" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.433472 4935 container_manager_linux.go:303] "Creating device plugin manager" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.433793 4935 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.433840 4935 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.434070 4935 state_mem.go:36] "Initialized new in-memory state store" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.434232 4935 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.435282 4935 kubelet.go:418] "Attempting to sync node with API server" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.435318 4935 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.435366 4935 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.435393 4935 kubelet.go:324] "Adding apiserver pod source" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.435412 4935 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.438006 4935 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.438402 4935 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439304 4935 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439858 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439882 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439893 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439902 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439916 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439925 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439933 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439946 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439955 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439964 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439983 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.439992 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.440758 4935 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.441118 4935 server.go:1280] "Started kubelet" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.441399 4935 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.441628 4935 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.442171 4935 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.442288 4935 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.442331 4935 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.442373 4935 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get 
\"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.442415 4935 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Dec 01 18:29:46 crc systemd[1]: Started Kubernetes Kubelet. Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.444260 4935 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.444364 4935 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.444382 4935 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 13:14:46.214004241 +0000 UTC Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.444489 4935 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1122h44m59.769521298s for next certificate rotation Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.444502 4935 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.444517 4935 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.444566 4935 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.444626 4935 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.445719 4935 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.445837 4935 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.445899 4935 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.446140 4935 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.65:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d2ae3b2940248 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting 
kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 18:29:46.441089608 +0000 UTC m=+0.462718857,LastTimestamp:2025-12-01 18:29:46.441089608 +0000 UTC m=+0.462718857,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.447641 4935 server.go:460] "Adding debug handlers to kubelet server" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.452642 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="200ms" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.453097 4935 factory.go:55] Registering systemd factory Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.453127 4935 factory.go:221] Registration of the systemd container factory successfully Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.455006 4935 factory.go:153] Registering CRI-O factory Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.455041 4935 factory.go:221] Registration of the crio container factory successfully Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.455176 4935 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.455224 4935 factory.go:103] Registering Raw factory Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.455252 4935 manager.go:1196] Started watching for new ooms in manager Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463644 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463709 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463725 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463736 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463748 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463759 4935 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463770 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463782 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463796 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463807 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463821 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463833 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463844 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463860 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463871 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463908 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463922 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463940 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.463996 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464010 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464022 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464033 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464044 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464076 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464088 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464098 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464196 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464212 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464227 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464245 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464260 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464273 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464285 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464297 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464310 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464323 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464337 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464349 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464360 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464377 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464388 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464399 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464410 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464423 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464436 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464447 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464448 4935 manager.go:319] Starting recovery of all containers Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464458 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464843 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464873 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464889 4935 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464905 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464921 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464942 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464956 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464971 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.464985 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465000 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465014 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465027 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465040 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465054 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465068 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465082 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465094 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465109 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465120 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465133 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465162 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465175 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465187 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465200 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465213 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465224 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465236 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465248 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465261 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465275 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465288 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465301 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465316 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465331 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465354 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465368 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465380 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465392 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465405 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465416 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465431 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465442 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465454 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465470 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465483 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465496 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465508 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465520 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465532 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465545 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465560 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465572 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465584 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465596 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465608 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465620 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465632 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465650 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465664 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465680 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465695 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465709 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465723 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465737 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465750 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465763 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465777 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465792 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465806 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465819 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465833 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465844 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465859 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465889 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465911 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.465926 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466648 4935 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466696 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466717 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466735 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466750 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466768 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466782 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466797 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466813 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466829 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466847 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466862 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466879 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466896 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466913 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466929 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466947 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466962 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466979 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.466996 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467019 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467036 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467052 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467069 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467085 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467101 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467117 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467133 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467177 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467196 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467215 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467232 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467248 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467264 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467280 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467297 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467311 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467329 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467344 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467361 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467377 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467395 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467411 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467426 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467444 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467462 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467479 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467495 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467510 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467525 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467540 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467555 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467570 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467588 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467603 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467619 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467634 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467652 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467669 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467686 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467702 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467717 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467735 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467750 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467766 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467783 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467798 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467814 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467829 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467852 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467869 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467885 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467900 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467916 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467932 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467949 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467967 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467982 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.467997 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.468011 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.468026 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.468040 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.468057 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.468073 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.468088 4935 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.468104 4935 reconstruct.go:97] "Volume reconstruction finished" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.468116 4935 reconciler.go:26] "Reconciler: start to sync state" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.495208 4935 manager.go:324] Recovery completed Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.503571 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.503657 4935 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.506030 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.506072 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.506085 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.506509 4935 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.506733 4935 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.506772 4935 kubelet.go:2335] "Starting kubelet main sync loop" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.506837 4935 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.507469 4935 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.507492 4935 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.507520 4935 state_mem.go:36] "Initialized new in-memory state store" Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.508758 4935 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.508827 4935 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.522388 4935 policy_none.go:49] "None policy: Start" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.523426 4935 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.523458 4935 state_mem.go:35] "Initializing new in-memory state store" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.544981 4935 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.580735 4935 manager.go:334] "Starting Device Plugin manager" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.580806 4935 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.580822 4935 server.go:79] "Starting device plugin registration server" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.581297 4935 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.581633 4935 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.581952 4935 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.582029 4935 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.582037 4935 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.590421 4935 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 01 18:29:46 crc kubenswrapper[4935]: 
I1201 18:29:46.606917 4935 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.607028 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.608240 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.608275 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.608287 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.608452 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.608823 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.608894 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.609333 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.609406 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.609428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.609714 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.609947 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.610006 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.609960 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.610084 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.610098 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.611219 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.611247 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.611260 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.611326 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.611378 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.611401 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.611424 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.611692 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.611769 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.612076 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.612212 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.612236 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.612390 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.613045 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.613091 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.613933 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.613994 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.614019 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.614222 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.614254 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.614270 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.614324 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.614382 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.614330 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.614482 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.614510 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.615544 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.615618 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.615642 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.653696 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="400ms" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.670680 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.670750 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" 
(UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.670796 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.670837 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.670874 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.670912 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.670945 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.670981 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.671013 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.671045 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.671091 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: 
\"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.671131 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.671196 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.671238 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.671269 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.682021 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.683775 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.683854 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.683871 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.683920 4935 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.684668 4935 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773063 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773219 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 
18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773245 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773262 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773281 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773302 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773319 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773334 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773352 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773369 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773383 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773371 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773418 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773398 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773459 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773511 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773610 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773607 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773614 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773671 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773651 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773618 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773659 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773733 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773745 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773607 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773810 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773829 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773791 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.773963 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.885359 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.886442 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.886533 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:46 
crc kubenswrapper[4935]: I1201 18:29:46.886553 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.886581 4935 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 18:29:46 crc kubenswrapper[4935]: E1201 18:29:46.886982 4935 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.952917 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.964114 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.986043 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: I1201 18:29:46.993214 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.994140 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-a4658294f2867d4e19c6362e0b096c1756d42a55cd7e007d75f5c64f90397215 WatchSource:0}: Error finding container a4658294f2867d4e19c6362e0b096c1756d42a55cd7e007d75f5c64f90397215: Status 404 returned error can't find the container with id a4658294f2867d4e19c6362e0b096c1756d42a55cd7e007d75f5c64f90397215 Dec 01 18:29:46 crc kubenswrapper[4935]: W1201 18:29:46.999555 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-7f88e46c61c3bc52b3c2f6b8d67d06521d0ccc5a6c447ef0e93dacfee7af62ff WatchSource:0}: Error finding container 7f88e46c61c3bc52b3c2f6b8d67d06521d0ccc5a6c447ef0e93dacfee7af62ff: Status 404 returned error can't find the container with id 7f88e46c61c3bc52b3c2f6b8d67d06521d0ccc5a6c447ef0e93dacfee7af62ff Dec 01 18:29:47 crc kubenswrapper[4935]: W1201 18:29:47.012172 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-e68c19551cd6ed7de684dbcafea10f1c36840b3a0551a5dfb0e5f08fffb81f9e WatchSource:0}: Error finding container e68c19551cd6ed7de684dbcafea10f1c36840b3a0551a5dfb0e5f08fffb81f9e: Status 404 returned error can't find the container with id e68c19551cd6ed7de684dbcafea10f1c36840b3a0551a5dfb0e5f08fffb81f9e Dec 01 18:29:47 crc kubenswrapper[4935]: W1201 18:29:47.013111 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-81e0899c5fe342b6abfaac721e0135c2a42b280896994246bb2cadc44b786fbd WatchSource:0}: Error finding container 81e0899c5fe342b6abfaac721e0135c2a42b280896994246bb2cadc44b786fbd: Status 404 returned error can't find the container with id 81e0899c5fe342b6abfaac721e0135c2a42b280896994246bb2cadc44b786fbd Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.023162 4935 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:29:47 crc kubenswrapper[4935]: W1201 18:29:47.045681 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-02b1a540dee4929fbb5d28ece9d324df180ff6e446757e70fd708e9d09f57ca4 WatchSource:0}: Error finding container 02b1a540dee4929fbb5d28ece9d324df180ff6e446757e70fd708e9d09f57ca4: Status 404 returned error can't find the container with id 02b1a540dee4929fbb5d28ece9d324df180ff6e446757e70fd708e9d09f57ca4 Dec 01 18:29:47 crc kubenswrapper[4935]: E1201 18:29:47.055370 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="800ms" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.287482 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.289301 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.289364 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.289381 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.289419 4935 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 18:29:47 crc kubenswrapper[4935]: E1201 18:29:47.290047 4935 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.447521 4935 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:47 crc kubenswrapper[4935]: W1201 18:29:47.470326 4935 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:47 crc kubenswrapper[4935]: E1201 18:29:47.470471 4935 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Dec 01 18:29:47 crc kubenswrapper[4935]: W1201 18:29:47.480471 4935 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:47 crc kubenswrapper[4935]: E1201 18:29:47.480515 4935 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.513090 4935 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952" exitCode=0 Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.513200 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952"} Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.513298 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"81e0899c5fe342b6abfaac721e0135c2a42b280896994246bb2cadc44b786fbd"} Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.513416 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.514824 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.514877 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.514888 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.516743 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa"} Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.516813 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e68c19551cd6ed7de684dbcafea10f1c36840b3a0551a5dfb0e5f08fffb81f9e"} Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.518464 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.518579 4935 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49" exitCode=0 Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.518659 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49"} Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.518709 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a4658294f2867d4e19c6362e0b096c1756d42a55cd7e007d75f5c64f90397215"} Dec 01 
18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.518859 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.519341 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.519375 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.519391 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.519770 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.519799 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.519811 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.520795 4935 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343" exitCode=0 Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.520863 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343"} Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.520891 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"7f88e46c61c3bc52b3c2f6b8d67d06521d0ccc5a6c447ef0e93dacfee7af62ff"} Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.520962 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.522230 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.522270 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.522286 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.525057 4935 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c" exitCode=0 Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.525128 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c"} Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.525243 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"02b1a540dee4929fbb5d28ece9d324df180ff6e446757e70fd708e9d09f57ca4"} Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.525406 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.526791 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.526828 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:47 crc kubenswrapper[4935]: I1201 18:29:47.526841 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:47 crc kubenswrapper[4935]: W1201 18:29:47.837840 4935 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:47 crc kubenswrapper[4935]: E1201 18:29:47.837967 4935 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Dec 01 18:29:47 crc kubenswrapper[4935]: E1201 18:29:47.857254 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="1.6s" Dec 01 18:29:48 crc kubenswrapper[4935]: W1201 18:29:48.013321 4935 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:48 crc kubenswrapper[4935]: E1201 18:29:48.013444 4935 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.090578 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.092478 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.092534 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.092554 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.092592 4935 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 18:29:48 crc kubenswrapper[4935]: E1201 18:29:48.093269 4935 kubelet_node_status.go:99] "Unable to register node with API server" 
err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.65:6443: connect: connection refused" node="crc" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.446832 4935 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.532092 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c"} Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.532141 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692"} Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.534903 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72"} Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.534986 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96"} Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.538003 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f"} Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.538040 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062"} Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.540974 4935 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd" exitCode=0 Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.541046 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd"} Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.541260 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.542850 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.542888 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.542907 4935 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.545553 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"164dfe29cf41ca1bd53f96a771d1f3f816d690a08586e1e7d71272583a16e348"} Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.545679 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.546670 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.546700 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:48 crc kubenswrapper[4935]: I1201 18:29:48.546711 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:49 crc kubenswrapper[4935]: W1201 18:29:49.388975 4935 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:49 crc kubenswrapper[4935]: E1201 18:29:49.389064 4935 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.65:6443: connect: connection refused" logger="UnhandledError" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.447070 4935 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.65:6443: connect: connection refused Dec 01 18:29:49 crc kubenswrapper[4935]: E1201 18:29:49.458758 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="3.2s" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.549313 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095"} Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.549435 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.550300 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.550320 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.550330 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.552458 4935 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf"} Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.552490 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e"} Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.552500 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa"} Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.552562 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.553018 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.553039 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.553047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.554794 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.554794 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f"} Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.555362 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.555382 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.555390 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.556478 4935 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561" exitCode=0 Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.556506 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561"} Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.556574 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.557026 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.557047 4935 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.557056 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.694113 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.695440 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.695468 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.695477 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:49 crc kubenswrapper[4935]: I1201 18:29:49.695498 4935 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.562211 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.562269 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.567392 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5"} Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.567481 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d"} Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.567498 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4"} Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.567514 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f"} Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.567560 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.567645 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.567570 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.569630 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.569665 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.569679 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:50 crc 
kubenswrapper[4935]: I1201 18:29:50.570497 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.570531 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.570560 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.570575 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.570560 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:50 crc kubenswrapper[4935]: I1201 18:29:50.570598 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:51 crc kubenswrapper[4935]: I1201 18:29:51.570824 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c"} Dec 01 18:29:51 crc kubenswrapper[4935]: I1201 18:29:51.570915 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:51 crc kubenswrapper[4935]: I1201 18:29:51.571592 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:51 crc kubenswrapper[4935]: I1201 18:29:51.571622 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:51 crc kubenswrapper[4935]: I1201 18:29:51.571631 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:52 crc kubenswrapper[4935]: I1201 18:29:52.513237 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:52 crc kubenswrapper[4935]: I1201 18:29:52.513392 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:52 crc kubenswrapper[4935]: I1201 18:29:52.514563 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:52 crc kubenswrapper[4935]: I1201 18:29:52.514598 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:52 crc kubenswrapper[4935]: I1201 18:29:52.514606 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:52 crc kubenswrapper[4935]: I1201 18:29:52.572623 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:52 crc kubenswrapper[4935]: I1201 18:29:52.573679 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:52 crc kubenswrapper[4935]: I1201 18:29:52.573719 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:52 crc kubenswrapper[4935]: I1201 18:29:52.573728 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:52 crc 
kubenswrapper[4935]: I1201 18:29:52.857560 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.443395 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.443591 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.446104 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.446253 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.446281 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.451082 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.545568 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.575878 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.575940 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.575878 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.576844 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.576874 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.576883 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.576927 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.576962 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.576983 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.612077 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.612322 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.613277 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:53 crc 
kubenswrapper[4935]: I1201 18:29:53.613334 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.613347 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:53 crc kubenswrapper[4935]: I1201 18:29:53.742456 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.579085 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.580182 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.580733 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.580759 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.580768 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.581101 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.581124 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.581139 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.684218 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.684359 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.685753 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.685849 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:54 crc kubenswrapper[4935]: I1201 18:29:54.685912 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:55 crc kubenswrapper[4935]: I1201 18:29:55.033574 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:29:55 crc kubenswrapper[4935]: I1201 18:29:55.034127 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:55 crc kubenswrapper[4935]: I1201 18:29:55.036222 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:55 crc kubenswrapper[4935]: I1201 18:29:55.036656 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:55 crc kubenswrapper[4935]: I1201 18:29:55.036760 4935 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:56 crc kubenswrapper[4935]: I1201 18:29:56.445570 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:56 crc kubenswrapper[4935]: I1201 18:29:56.445769 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:56 crc kubenswrapper[4935]: I1201 18:29:56.447058 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:56 crc kubenswrapper[4935]: I1201 18:29:56.447141 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:56 crc kubenswrapper[4935]: I1201 18:29:56.447206 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:56 crc kubenswrapper[4935]: I1201 18:29:56.546034 4935 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 01 18:29:56 crc kubenswrapper[4935]: I1201 18:29:56.546112 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 18:29:56 crc kubenswrapper[4935]: E1201 18:29:56.591452 4935 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 01 18:29:59 crc kubenswrapper[4935]: I1201 18:29:59.108722 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:29:59 crc kubenswrapper[4935]: I1201 18:29:59.108860 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:29:59 crc kubenswrapper[4935]: I1201 18:29:59.110170 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:29:59 crc kubenswrapper[4935]: I1201 18:29:59.110207 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:29:59 crc kubenswrapper[4935]: I1201 18:29:59.110217 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:29:59 crc kubenswrapper[4935]: I1201 18:29:59.302855 4935 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 18:29:59 crc kubenswrapper[4935]: I1201 18:29:59.302905 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" 
output="HTTP probe failed with statuscode: 403" Dec 01 18:29:59 crc kubenswrapper[4935]: I1201 18:29:59.309772 4935 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 18:29:59 crc kubenswrapper[4935]: I1201 18:29:59.309850 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 01 18:30:02 crc kubenswrapper[4935]: I1201 18:30:02.514811 4935 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 18:30:02 crc kubenswrapper[4935]: I1201 18:30:02.514924 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.617420 4935 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.617534 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.750097 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.750323 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.751058 4935 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.751213 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.751983 4935 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.752029 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.752046 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:03 crc kubenswrapper[4935]: I1201 18:30:03.762541 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.294328 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.296355 4935 trace.go:236] Trace[493031360]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 18:29:50.246) (total time: 14049ms): Dec 01 18:30:04 crc kubenswrapper[4935]: Trace[493031360]: ---"Objects listed" error: 14049ms (18:30:04.296) Dec 01 18:30:04 crc kubenswrapper[4935]: Trace[493031360]: [14.049683202s] [14.049683202s] END Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.296400 4935 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.299096 4935 trace.go:236] Trace[1726467825]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 18:29:53.306) (total time: 10992ms): Dec 01 18:30:04 crc kubenswrapper[4935]: Trace[1726467825]: ---"Objects listed" error: 10992ms (18:30:04.298) Dec 01 18:30:04 crc kubenswrapper[4935]: Trace[1726467825]: [10.992655408s] [10.992655408s] END Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.299116 4935 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.301776 4935 trace.go:236] Trace[2022042252]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 18:29:50.457) (total time: 13843ms): Dec 01 18:30:04 crc kubenswrapper[4935]: Trace[2022042252]: ---"Objects listed" error: 13843ms (18:30:04.301) Dec 01 18:30:04 crc kubenswrapper[4935]: Trace[2022042252]: [13.843956236s] [13.843956236s] END Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.301800 4935 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.302806 4935 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.303584 4935 trace.go:236] Trace[803715629]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 18:29:49.699) (total time: 14603ms): Dec 01 18:30:04 crc kubenswrapper[4935]: Trace[803715629]: ---"Objects listed" error: 14603ms (18:30:04.303) Dec 01 18:30:04 crc kubenswrapper[4935]: Trace[803715629]: [14.603764047s] [14.603764047s] END Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.303614 4935 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.303696 4935 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes 
\"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.369362 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.375978 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.448170 4935 apiserver.go:52] "Watching apiserver" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.451303 4935 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.451544 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.451836 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.451868 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.451935 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.452173 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.452232 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.452702 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.452742 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.453242 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.452757 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.455724 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.455925 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.456811 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.456938 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.456990 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.457006 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.457491 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.457771 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.457981 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.484600 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.502277 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.523079 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.540333 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.545457 4935 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.550849 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.560598 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.571396 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.580601 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603699 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603739 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603756 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603776 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603811 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603827 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603843 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603860 4935 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603875 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603912 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603930 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603946 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603960 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.603989 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604010 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604025 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604044 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: 
\"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604061 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604078 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604094 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604109 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604127 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604161 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604181 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604196 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604210 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604231 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604247 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604260 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604276 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604290 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604305 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604325 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604340 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604357 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604374 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604388 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod 
\"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604402 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604417 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604432 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604446 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604461 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604478 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604494 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604510 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604525 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604540 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604556 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604573 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604590 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604606 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604624 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604640 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604658 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604674 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604691 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604719 4935 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604733 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604747 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604763 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604776 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604791 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604805 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604819 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604835 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604849 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604863 4935 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604879 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604896 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604916 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604938 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604958 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.604978 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605000 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605020 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605040 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605055 4935 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605071 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605086 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605132 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605165 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605182 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605199 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605215 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605231 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605249 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605266 4935 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605285 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605303 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605323 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605340 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605359 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605377 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605395 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605414 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605430 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605445 
4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605460 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605476 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605476 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605492 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605566 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605592 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605611 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605630 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605649 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605666 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605685 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605702 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605719 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605731 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605743 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605761 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605777 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605794 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605814 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605831 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605850 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605866 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605883 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605899 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: 
\"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605906 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605914 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605932 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605950 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605970 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.605988 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606006 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606022 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606038 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606044 4935 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606056 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606074 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606091 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606107 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606123 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606140 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606174 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606193 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606193 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod 
"a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606212 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606230 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606246 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606265 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606282 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606298 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606320 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606333 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606339 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606370 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606394 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606411 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606427 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606443 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606460 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606477 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606497 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606516 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606534 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606551 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606568 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606586 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606602 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606619 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606634 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606651 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606670 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606688 4935 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606704 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606706 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606724 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606743 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606760 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606778 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606795 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606815 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606832 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod 
\"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606849 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606866 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606922 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606940 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606958 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606974 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.606990 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.607008 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.607024 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.607040 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" 
(UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.607056 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.607088 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.607117 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.607137 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610261 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610290 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610320 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610347 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610373 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610402 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: 
\"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610432 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610461 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610489 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610519 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610545 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610570 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610622 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610652 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610672 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610695 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610714 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610739 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610763 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610785 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610812 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610833 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610856 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610895 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610999 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611021 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611070 4935 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611082 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611093 4935 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611103 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611114 4935 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611125 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611137 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611162 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.624474 4935 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608063 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608182 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608232 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608237 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608434 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608466 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608603 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608725 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608735 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608755 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608901 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.608973 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609067 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609025 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609116 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609281 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609321 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609545 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609554 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609578 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609825 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609832 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609833 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.609953 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.636785 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.636823 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610125 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.636882 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611054 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611066 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611196 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611284 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611667 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611681 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.611670 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.612067 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.612133 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.612439 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.612516 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.612549 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.612566 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.612762 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.613166 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.613180 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.613204 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.613261 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.613464 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.613636 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.613896 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.614379 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.614393 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.614575 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.614848 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.615225 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.615404 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.615554 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.615773 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.615978 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.616050 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). 
InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.616083 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.616307 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.616310 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.616354 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.616371 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.616486 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.616780 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.616952 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.617120 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.617413 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.617730 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.618371 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.618931 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.619211 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.619344 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.619560 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.620440 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.620534 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.620945 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.621016 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.620809 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.621232 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.621584 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.621601 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.622622 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.622631 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.622633 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.622682 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.622876 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.622867 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.623266 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.623300 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.623352 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.623381 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.623643 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.623788 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.624028 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.624277 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.626576 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.627035 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.627129 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.627167 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.627601 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.627692 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.628067 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.628267 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.628341 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.628714 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.628873 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.628875 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.628925 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.629306 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.630028 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.630200 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.630448 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.630458 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.630595 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:30:05.130570635 +0000 UTC m=+19.152199894 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.631043 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.631202 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.630864 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.631621 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.631653 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.631950 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.632082 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.632498 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.632637 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.633361 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.634166 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.634964 4935 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.635259 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.635593 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.636267 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.636568 4935 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.610045 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.638089 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.638859 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.638994 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.641865 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.642981 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.643381 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.643307 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.643605 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.643677 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.643795 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.643945 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:05.143913351 +0000 UTC m=+19.165542620 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.644016 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:05.144004984 +0000 UTC m=+19.165634253 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.644535 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.644590 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.644621 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.644721 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.644844 4935 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.645045 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.645097 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.645141 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.645862 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.645989 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.645999 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.646085 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.646744 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.646850 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.646941 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.647178 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.647196 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.647727 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.648557 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.648735 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.648740 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.649711 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.650073 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.650754 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.651341 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.651379 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.655776 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.656068 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.656419 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.657470 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.657620 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.658785 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.658901 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.661677 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.661985 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.662197 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.662243 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.662260 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.662889 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.663511 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.664091 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.664254 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.664452 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.664604 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.664565 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.664822 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.665366 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.664986 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.667322 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.667347 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.667363 4935 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.667430 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:05.167410733 +0000 UTC m=+19.189039992 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.674626 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.686677 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.686708 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.686724 4935 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:04 crc kubenswrapper[4935]: E1201 18:30:04.686780 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:05.186762436 +0000 UTC m=+19.208391695 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.687218 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.687525 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.690637 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.691960 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.700361 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.701661 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.703706 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711762 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711795 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711879 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711910 4935 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711920 4935 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711929 4935 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711939 4935 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711949 4935 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711957 4935 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711965 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711974 4935 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711976 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod 
\"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.711983 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712004 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712015 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712025 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712034 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712044 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712052 4935 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712061 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712070 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712078 4935 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712086 4935 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712094 4935 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc 
kubenswrapper[4935]: I1201 18:30:04.712102 4935 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712110 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712119 4935 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712129 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712137 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712163 4935 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712172 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712181 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712188 4935 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712198 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712207 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712215 4935 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712224 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712234 4935 
reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712243 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712252 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712260 4935 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712268 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712276 4935 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712554 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712568 4935 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712577 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712586 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712594 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712604 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712612 4935 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712621 4935 
reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712629 4935 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712640 4935 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712648 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712657 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712665 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712674 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712683 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712693 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712702 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712710 4935 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712719 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712728 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712763 4935 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712774 4935 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712782 4935 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712963 4935 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712978 4935 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712988 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.712998 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713007 4935 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713016 4935 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713025 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713033 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713042 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713050 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713059 4935 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713067 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713076 4935 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713084 4935 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713093 4935 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713102 4935 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713110 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713119 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713128 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713136 4935 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713164 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713173 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713182 4935 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" 
DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713192 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713201 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713210 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713219 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713228 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713236 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713244 4935 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713252 4935 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713260 4935 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713268 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713276 4935 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713285 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713294 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc 
kubenswrapper[4935]: I1201 18:30:04.713303 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713312 4935 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713321 4935 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713330 4935 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713339 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713348 4935 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713357 4935 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713366 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713375 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713384 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713393 4935 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713403 4935 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713411 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" 
Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713437 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713447 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713455 4935 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713464 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713473 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713483 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713516 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713525 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713534 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713543 4935 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713551 4935 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713560 4935 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713570 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713578 4935 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713586 4935 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713594 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713603 4935 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713612 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713620 4935 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713633 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713643 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713651 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713661 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713672 4935 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713680 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713688 4935 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" 
DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713696 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713718 4935 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713728 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713804 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713817 4935 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713926 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713935 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713944 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713953 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713962 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713971 4935 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713978 4935 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713986 4935 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.713997 4935 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714005 4935 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714014 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714022 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714030 4935 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714038 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714046 4935 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714055 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714064 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714074 4935 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714082 4935 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714091 4935 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714099 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714107 4935 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714116 4935 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714125 4935 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714133 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714158 4935 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714168 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714176 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714184 4935 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714192 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714201 4935 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714210 4935 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714218 4935 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714257 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714269 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714281 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714291 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714299 4935 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714307 4935 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714315 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714327 4935 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714334 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714343 4935 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714351 4935 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.714361 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.723299 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.731306 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.732666 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.738560 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.743267 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.743888 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.761699 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.766038 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.780802 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.781177 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.781207 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.810296 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.819926 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.839759 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.852881 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.870512 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.889406 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true
,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3
5b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.897739 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with 
unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.906245 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.915093 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.930932 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.946988 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:04 crc kubenswrapper[4935]: I1201 18:30:04.956667 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.218263 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.218346 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218388 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:30:06.218358553 +0000 UTC m=+20.239987812 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.218434 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218468 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218488 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218500 4935 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218546 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:06.218531988 +0000 UTC m=+20.240161247 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218549 4935 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218586 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:06.218578 +0000 UTC m=+20.240207259 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.218469 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218597 4935 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218665 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218677 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218687 4935 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.218614 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218687 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:06.218664703 +0000 UTC m=+20.240294042 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.218720 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:06.218713504 +0000 UTC m=+20.240342763 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.625953 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424"} Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.626054 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009"} Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.626089 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"126d7dfe120f41c9dd47d5b2a1a9a33735e5d3467c387941f9a840b9bbc32db7"} Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.628259 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7de5c5aff0e6df43f896f1b42a9c7e441f1ffc1f12d3fa07f61a478e9786e44c"} Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.631044 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e"} Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.631094 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"a25ffae50fb63ec824c39fc553da9ebf4dfbd0a87e7f42fdfe1be4db88b9e52b"} Dec 01 18:30:05 crc kubenswrapper[4935]: E1201 18:30:05.643929 4935 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.650834 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.671137 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.691784 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.710871 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.730719 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.744367 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.768191 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.783756 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.806411 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.823479 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.839364 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.860310 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.876550 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.890552 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.906600 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.922209 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.937504 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:05 crc kubenswrapper[4935]: I1201 18:30:05.963777 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:05Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.226185 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.226335 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.226384 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.226429 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.226471 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: 
\"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.226702 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.226745 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.226770 4935 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.226811 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:30:08.226777851 +0000 UTC m=+22.248407110 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.226864 4935 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.226965 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.227025 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.227051 4935 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.226968 4935 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.226880 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:08.226850333 +0000 UTC m=+22.248479632 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.227199 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:08.227169453 +0000 UTC m=+22.248798722 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.227219 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:08.227208914 +0000 UTC m=+22.248838193 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.227235 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:08.227226675 +0000 UTC m=+22.248855954 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.507420 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.507519 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.507676 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.507731 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.507858 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:06 crc kubenswrapper[4935]: E1201 18:30:06.508021 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.513052 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.514670 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.518017 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.519802 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.522486 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.524046 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.525154 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.527201 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 01 18:30:06 
crc kubenswrapper[4935]: I1201 18:30:06.528279 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.529084 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.529686 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.530397 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.530888 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.531456 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.531978 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.532525 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.532587 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.533105 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.533561 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.534112 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.534692 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.535209 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.535796 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.536272 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.536914 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.538496 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.539399 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" 
path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.540407 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.541603 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.542239 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.543209 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.543759 4935 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.543928 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.546266 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.546949 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.546929 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.547652 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.549394 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" 
path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.551039 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.551707 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.552799 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.553575 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.554606 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.555314 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.556488 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.557544 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.558088 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.559051 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.559730 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.560080 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.561365 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.561906 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.562623 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.563550 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.564143 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.565262 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.565815 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.573781 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.590979 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.613315 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.631634 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.656388 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ec
d6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:06 crc kubenswrapper[4935]: I1201 18:30:06.670468 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.504094 4935 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.505881 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.505917 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.505927 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.505987 4935 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.514764 4935 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.515043 4935 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.516168 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.516342 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.516488 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.516641 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.516809 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:07Z","lastTransitionTime":"2025-12-01T18:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:07 crc kubenswrapper[4935]: E1201 18:30:07.539518 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.545602 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.545663 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.545676 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.545697 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.545712 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:07Z","lastTransitionTime":"2025-12-01T18:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:07 crc kubenswrapper[4935]: E1201 18:30:07.562967 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.567875 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.567916 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.567925 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.567945 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.567958 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:07Z","lastTransitionTime":"2025-12-01T18:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:07 crc kubenswrapper[4935]: E1201 18:30:07.584316 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.590549 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.590607 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.590621 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.590642 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.590658 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:07Z","lastTransitionTime":"2025-12-01T18:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:07 crc kubenswrapper[4935]: E1201 18:30:07.606091 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.611274 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.611461 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.611576 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.611680 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.611788 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:07Z","lastTransitionTime":"2025-12-01T18:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:07 crc kubenswrapper[4935]: E1201 18:30:07.630542 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: E1201 18:30:07.630722 4935 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.632603 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.632639 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.632647 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.632661 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.632674 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:07Z","lastTransitionTime":"2025-12-01T18:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.636408 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c"} Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.654034 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.671067 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.688192 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.702369 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.719673 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.735037 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.735110 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.735176 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.735204 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.735220 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:07Z","lastTransitionTime":"2025-12-01T18:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.742550 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.758115 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.778665 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.793130 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:07Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.838003 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.838398 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.838513 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.838612 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.838696 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:07Z","lastTransitionTime":"2025-12-01T18:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.941343 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.941454 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.941476 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.941509 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:07 crc kubenswrapper[4935]: I1201 18:30:07.941530 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:07Z","lastTransitionTime":"2025-12-01T18:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.044691 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.044762 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.044782 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.044808 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.044827 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.147081 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.147136 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.147174 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.147231 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.147249 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.245797 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.245925 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.245971 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.246014 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.246058 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246199 4935 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246263 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246296 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246321 4935 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246341 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-01 18:30:12.24629396 +0000 UTC m=+26.267923259 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246259 4935 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246387 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:12.246366843 +0000 UTC m=+26.267996142 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246417 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:12.246399974 +0000 UTC m=+26.268029273 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246519 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:30:12.246501527 +0000 UTC m=+26.268130836 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246594 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246666 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246707 4935 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.246886 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:12.246836527 +0000 UTC m=+26.268465906 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.249903 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.249935 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.249943 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.249960 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.249970 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.352426 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.352473 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.352482 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.352498 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.352508 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.455001 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.455055 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.455069 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.455089 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.455101 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.507073 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.507114 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.507097 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.507222 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.507310 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:08 crc kubenswrapper[4935]: E1201 18:30:08.507376 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.557651 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.557705 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.557716 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.557739 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.557753 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.659610 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.659686 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.659716 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.659753 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.659779 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.762982 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.763045 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.763063 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.763092 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.763114 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.866482 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.866542 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.866554 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.866575 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.866597 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.970595 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.970670 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.970688 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.970714 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:08 crc kubenswrapper[4935]: I1201 18:30:08.970734 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:08Z","lastTransitionTime":"2025-12-01T18:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.073645 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.073683 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.073692 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.073705 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.073715 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:09Z","lastTransitionTime":"2025-12-01T18:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.176364 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.176433 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.176452 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.176478 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.176498 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:09Z","lastTransitionTime":"2025-12-01T18:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.278923 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.278983 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.278999 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.279016 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.279028 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:09Z","lastTransitionTime":"2025-12-01T18:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.384772 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.384844 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.384860 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.384887 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.384906 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:09Z","lastTransitionTime":"2025-12-01T18:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.488049 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.488134 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.488169 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.488189 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.488201 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:09Z","lastTransitionTime":"2025-12-01T18:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.591588 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.591635 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.591647 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.591662 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.591676 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:09Z","lastTransitionTime":"2025-12-01T18:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.694762 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.694812 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.694823 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.694842 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.694859 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:09Z","lastTransitionTime":"2025-12-01T18:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.797519 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.797572 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.797584 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.797608 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.797627 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:09Z","lastTransitionTime":"2025-12-01T18:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.899838 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.899887 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.899897 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.899911 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:09 crc kubenswrapper[4935]: I1201 18:30:09.899920 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:09Z","lastTransitionTime":"2025-12-01T18:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.002350 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.002428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.002451 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.002489 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.002515 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.105317 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.105407 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.105432 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.105463 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.105483 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.208966 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.209047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.209071 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.209106 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.209131 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.311599 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.311655 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.311666 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.311683 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.311696 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.414051 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.414086 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.414095 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.414109 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.414118 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.507752 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.507817 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:10 crc kubenswrapper[4935]: E1201 18:30:10.507880 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:10 crc kubenswrapper[4935]: E1201 18:30:10.507954 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.507997 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:10 crc kubenswrapper[4935]: E1201 18:30:10.508132 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.516379 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.516419 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.516430 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.516445 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.516457 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.619385 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.619450 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.619468 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.619492 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.619509 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.722701 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.722765 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.722781 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.722806 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.722857 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.825629 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.825699 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.825716 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.825741 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.825758 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.928560 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.928614 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.928628 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.928651 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:10 crc kubenswrapper[4935]: I1201 18:30:10.928666 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:10Z","lastTransitionTime":"2025-12-01T18:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.031314 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.031372 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.031387 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.031408 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.031425 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.133415 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.133472 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.133480 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.133495 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.133505 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.235776 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.235812 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.235821 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.235835 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.235845 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.338485 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.338528 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.338540 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.338555 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.338566 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.441209 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.441274 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.441287 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.441303 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.441314 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.544375 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.544432 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.544443 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.544460 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.544474 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.647160 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.647208 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.647222 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.647236 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.647245 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.749484 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.749537 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.749546 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.749570 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.749582 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.851871 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.851910 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.851919 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.851935 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.851945 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.948554 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-zznnp"] Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.949772 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-f64dz"] Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.950103 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.951480 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.953913 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-jzx4x"] Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.954477 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.954516 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.954527 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.954538 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.954547 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:11Z","lastTransitionTime":"2025-12-01T18:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.954675 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-jzx4x" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.957603 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-26rsx"] Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.957901 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-26rsx" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.958863 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.958907 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.959342 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.959656 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.959884 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.960013 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.961619 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.963638 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.963678 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.966327 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.966367 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.966401 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.966421 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.966509 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.966527 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 18:30:11 crc kubenswrapper[4935]: I1201 18:30:11.991855 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:11Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.008290 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.023854 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.039392 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.056421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.056451 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.056459 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.056472 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.056481 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:12Z","lastTransitionTime":"2025-12-01T18:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.058242 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.073185 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.079522 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/839b5110-76e3-4c2f-80aa-1f2c0485e231-hosts-file\") pod \"node-resolver-26rsx\" (UID: \"839b5110-76e3-4c2f-80aa-1f2c0485e231\") " pod="openshift-dns/node-resolver-26rsx" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.079553 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b2a9250f-1d40-44de-ace0-dc64bc7bb803-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.079569 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-mcd-auth-proxy-config\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.079591 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-system-cni-dir\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.079607 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-cnibin\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.079753 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-cnibin\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.079861 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3f7b45c6-7cf7-420d-afb3-ea00b791af58-cni-binary-copy\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.079906 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-var-lib-cni-bin\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.079954 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-hostroot\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080028 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-rootfs\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080078 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-conf-dir\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080134 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-etc-kubernetes\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080177 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-proxy-tls\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080197 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsmv6\" (UniqueName: \"kubernetes.io/projected/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-kube-api-access-gsmv6\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080217 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-os-release\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080234 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4wzb\" (UniqueName: \"kubernetes.io/projected/3f7b45c6-7cf7-420d-afb3-ea00b791af58-kube-api-access-p4wzb\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080255 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b2a9250f-1d40-44de-ace0-dc64bc7bb803-cni-binary-copy\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080287 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-socket-dir-parent\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080307 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-var-lib-cni-multus\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080376 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-var-lib-kubelet\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080427 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-daemon-config\") pod \"multus-jzx4x\" (UID: 
\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080469 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-cni-dir\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080485 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-os-release\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080505 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-run-k8s-cni-cncf-io\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080524 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-run-netns\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080542 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-run-multus-certs\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080568 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9xk6\" (UniqueName: \"kubernetes.io/projected/839b5110-76e3-4c2f-80aa-1f2c0485e231-kube-api-access-z9xk6\") pod \"node-resolver-26rsx\" (UID: \"839b5110-76e3-4c2f-80aa-1f2c0485e231\") " pod="openshift-dns/node-resolver-26rsx" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080627 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080644 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kd257\" (UniqueName: \"kubernetes.io/projected/b2a9250f-1d40-44de-ace0-dc64bc7bb803-kube-api-access-kd257\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.080666 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-system-cni-dir\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.092352 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.107539 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.122355 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.135776 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.148615 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.158620 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.158652 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.158680 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.158694 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.158705 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:12Z","lastTransitionTime":"2025-12-01T18:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.161962 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.172648 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181233 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-tuning-conf-dir\") pod 
\"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181284 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kd257\" (UniqueName: \"kubernetes.io/projected/b2a9250f-1d40-44de-ace0-dc64bc7bb803-kube-api-access-kd257\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181317 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-system-cni-dir\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181351 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/839b5110-76e3-4c2f-80aa-1f2c0485e231-hosts-file\") pod \"node-resolver-26rsx\" (UID: \"839b5110-76e3-4c2f-80aa-1f2c0485e231\") " pod="openshift-dns/node-resolver-26rsx" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181383 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-mcd-auth-proxy-config\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181413 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b2a9250f-1d40-44de-ace0-dc64bc7bb803-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181446 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-system-cni-dir\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181477 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-cnibin\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181509 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-cnibin\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181498 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-system-cni-dir\") 
pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181531 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/839b5110-76e3-4c2f-80aa-1f2c0485e231-hosts-file\") pod \"node-resolver-26rsx\" (UID: \"839b5110-76e3-4c2f-80aa-1f2c0485e231\") " pod="openshift-dns/node-resolver-26rsx" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181547 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-system-cni-dir\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181617 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-hostroot\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181553 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-hostroot\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181570 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-cnibin\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181671 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-rootfs\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181694 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3f7b45c6-7cf7-420d-afb3-ea00b791af58-cni-binary-copy\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.182169 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.182205 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-cnibin\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.182351 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-var-lib-cni-bin\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.182547 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-mcd-auth-proxy-config\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.182784 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b2a9250f-1d40-44de-ace0-dc64bc7bb803-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.183257 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3f7b45c6-7cf7-420d-afb3-ea00b791af58-cni-binary-copy\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181713 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-var-lib-cni-bin\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.181714 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-rootfs\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184601 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-conf-dir\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184671 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-conf-dir\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184714 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-etc-kubernetes\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184759 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-proxy-tls\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184811 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-etc-kubernetes\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184807 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsmv6\" (UniqueName: \"kubernetes.io/projected/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-kube-api-access-gsmv6\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184878 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-os-release\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184910 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4wzb\" (UniqueName: \"kubernetes.io/projected/3f7b45c6-7cf7-420d-afb3-ea00b791af58-kube-api-access-p4wzb\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184933 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b2a9250f-1d40-44de-ace0-dc64bc7bb803-cni-binary-copy\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184957 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-socket-dir-parent\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.184977 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-var-lib-cni-multus\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185000 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-var-lib-kubelet\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185021 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-daemon-config\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185057 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-cni-dir\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185081 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-os-release\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185103 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-run-k8s-cni-cncf-io\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185124 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-run-netns\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185158 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-run-multus-certs\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185180 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9xk6\" (UniqueName: \"kubernetes.io/projected/839b5110-76e3-4c2f-80aa-1f2c0485e231-kube-api-access-z9xk6\") pod \"node-resolver-26rsx\" (UID: \"839b5110-76e3-4c2f-80aa-1f2c0485e231\") " pod="openshift-dns/node-resolver-26rsx" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185386 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b2a9250f-1d40-44de-ace0-dc64bc7bb803-os-release\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185437 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-run-k8s-cni-cncf-io\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185472 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-run-netns\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" 
Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185489 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b2a9250f-1d40-44de-ace0-dc64bc7bb803-cni-binary-copy\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185477 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-os-release\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185512 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-var-lib-cni-multus\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185513 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-var-lib-kubelet\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185573 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-socket-dir-parent\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185569 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-host-run-multus-certs\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185972 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-daemon-config\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.185991 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f7b45c6-7cf7-420d-afb3-ea00b791af58-multus-cni-dir\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.189603 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-proxy-tls\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.196067 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni
/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.204004 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsmv6\" (UniqueName: \"kubernetes.io/projected/56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522-kube-api-access-gsmv6\") pod \"machine-config-daemon-zznnp\" (UID: \"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\") " pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.204092 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kd257\" (UniqueName: \"kubernetes.io/projected/b2a9250f-1d40-44de-ace0-dc64bc7bb803-kube-api-access-kd257\") pod \"multus-additional-cni-plugins-f64dz\" (UID: \"b2a9250f-1d40-44de-ace0-dc64bc7bb803\") " pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.205245 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4wzb\" (UniqueName: \"kubernetes.io/projected/3f7b45c6-7cf7-420d-afb3-ea00b791af58-kube-api-access-p4wzb\") pod \"multus-jzx4x\" (UID: \"3f7b45c6-7cf7-420d-afb3-ea00b791af58\") " pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.210218 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.212343 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9xk6\" (UniqueName: \"kubernetes.io/projected/839b5110-76e3-4c2f-80aa-1f2c0485e231-kube-api-access-z9xk6\") pod \"node-resolver-26rsx\" (UID: \"839b5110-76e3-4c2f-80aa-1f2c0485e231\") " pod="openshift-dns/node-resolver-26rsx" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.223103 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06
bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.240611 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.253897 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.261981 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.262020 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.262030 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.262049 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.262066 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:12Z","lastTransitionTime":"2025-12-01T18:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.267525 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.267902 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.277203 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-f64dz" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.285575 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.285700 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.285745 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.285788 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:30:20.28574254 +0000 UTC m=+34.307371809 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.285841 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.285895 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.285914 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.285921 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 
18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.285986 4935 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.285995 4935 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.286044 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:20.286023869 +0000 UTC m=+34.307653168 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.286072 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:20.2860588 +0000 UTC m=+34.307688089 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.286112 4935 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.286171 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:20.286143293 +0000 UTC m=+34.307772622 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.286278 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.286299 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.286309 4935 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.286339 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:20.286332099 +0000 UTC m=+34.307961358 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.286428 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jzx4x" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.294486 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-26rsx" Dec 01 18:30:12 crc kubenswrapper[4935]: W1201 18:30:12.294594 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2a9250f_1d40_44de_ace0_dc64bc7bb803.slice/crio-22e2a8c7e5497f94ba8adeca97cfca71b75335a5315a18fa0bbca9f49674511f WatchSource:0}: Error finding container 22e2a8c7e5497f94ba8adeca97cfca71b75335a5315a18fa0bbca9f49674511f: Status 404 returned error can't find the container with id 22e2a8c7e5497f94ba8adeca97cfca71b75335a5315a18fa0bbca9f49674511f Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.295028 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e1
22bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.313625 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.341631 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.366576 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.371006 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4s97m"] Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.371758 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.379209 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.379249 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.379260 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.379278 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.379290 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:12Z","lastTransitionTime":"2025-12-01T18:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.379994 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.380301 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.380977 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.381120 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.381346 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.381563 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.381735 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.411478 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.428611 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.456542 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts
\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host
-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.476833 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\
"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.484740 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.484787 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.484799 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.484818 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.484828 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:12Z","lastTransitionTime":"2025-12-01T18:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488674 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-log-socket\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488708 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-ovn\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488728 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovn-node-metrics-cert\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488746 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-openvswitch\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488762 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-ovn-kubernetes\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488787 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-kubelet\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488805 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-config\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488823 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmptx\" (UniqueName: \"kubernetes.io/projected/f839cb87-9d0b-44af-a9a9-8a6df524aa62-kube-api-access-hmptx\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488906 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-netns\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488937 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-script-lib\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.488966 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-systemd-units\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.489087 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-env-overrides\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.489196 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-netd\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.489240 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-systemd\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.489257 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-etc-openvswitch\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.489277 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.489305 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-node-log\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.489336 4935 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-var-lib-openvswitch\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.489365 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-bin\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.489390 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-slash\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.493266 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.508197 4935 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.508205 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.508331 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.508215 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.508441 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.508348 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: E1201 18:30:12.508525 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.522097 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.538064 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.551654 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.566366 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.579862 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.587736 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.587763 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.587774 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.587788 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.587799 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:12Z","lastTransitionTime":"2025-12-01T18:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590042 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-config\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590097 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmptx\" (UniqueName: \"kubernetes.io/projected/f839cb87-9d0b-44af-a9a9-8a6df524aa62-kube-api-access-hmptx\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590187 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-netns\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590220 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-script-lib\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590252 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-systemd-units\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590283 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-env-overrides\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590314 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-netd\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590361 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-systemd\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590389 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-etc-openvswitch\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 
18:30:12.590420 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590474 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-var-lib-openvswitch\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590502 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-node-log\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590534 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-bin\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590568 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-slash\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590611 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-log-socket\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590648 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-openvswitch\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590676 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-ovn\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590703 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovn-node-metrics-cert\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590740 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-ovn-kubernetes\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590782 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-kubelet\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590876 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-kubelet\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590889 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-config\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590907 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-bin\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590934 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-slash\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590956 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-netns\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590961 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-netd\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590982 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-systemd-units\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590987 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-systemd\") pod 
\"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590986 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-env-overrides\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.591013 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.590980 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-log-socket\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.591038 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-node-log\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.591014 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-openvswitch\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.591015 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-var-lib-openvswitch\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.591070 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-etc-openvswitch\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.591045 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-ovn\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.591080 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-ovn-kubernetes\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" 
Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.591085 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-script-lib\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.594961 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.595548 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovn-node-metrics-cert\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.607460 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmptx\" (UniqueName: \"kubernetes.io/projected/f839cb87-9d0b-44af-a9a9-8a6df524aa62-kube-api-access-hmptx\") pod \"ovnkube-node-4s97m\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.614515 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.627625 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.657465 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzx4x" event={"ID":"3f7b45c6-7cf7-420d-afb3-ea00b791af58","Type":"ContainerStarted","Data":"b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.657535 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzx4x" event={"ID":"3f7b45c6-7cf7-420d-afb3-ea00b791af58","Type":"ContainerStarted","Data":"c033906f5955f21ab3800db25528396f89e52f6e1c93305f5b72aae7fe02d267"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.660062 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" event={"ID":"b2a9250f-1d40-44de-ace0-dc64bc7bb803","Type":"ContainerStarted","Data":"5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.660111 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" event={"ID":"b2a9250f-1d40-44de-ace0-dc64bc7bb803","Type":"ContainerStarted","Data":"22e2a8c7e5497f94ba8adeca97cfca71b75335a5315a18fa0bbca9f49674511f"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.662392 4935 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-dns/node-resolver-26rsx" event={"ID":"839b5110-76e3-4c2f-80aa-1f2c0485e231","Type":"ContainerStarted","Data":"a6ba0708686b3057ddce4c74ead884179d28af774e1352b7df9645f82d197c47"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.663953 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.663989 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.663999 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"82fed7091acc6d9b43f87b2ea2db6e3bfd9e3fe463c79e329c146073acd29917"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.670880 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.682615 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.689997 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.690031 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.690040 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.690058 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.690071 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:12Z","lastTransitionTime":"2025-12-01T18:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.698196 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.698174 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserve
r-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.720461 4935 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.739529 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.764969 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":
\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.788655 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.792864 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.792915 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.792926 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.792947 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.792962 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:12Z","lastTransitionTime":"2025-12-01T18:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.810847 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.827418 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.840411 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.855805 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.873924 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.886224 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.896026 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.896080 4935 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.896091 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.896113 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.896128 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:12Z","lastTransitionTime":"2025-12-01T18:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.903061 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.916980 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.935644 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.949788 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.968256 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f
6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.981364 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:12 crc kubenswrapper[4935]: I1201 18:30:12.994848 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.005189 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.005233 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.005245 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.005261 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.005271 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.009974 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.023756 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.035508 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.057978 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.076862 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.088486 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.100670 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.107571 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" 
Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.107617 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.107632 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.107657 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.107671 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.118401 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.210886 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.210931 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 
18:30:13.210941 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.210959 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.210990 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.313329 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.313365 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.313374 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.313390 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.313399 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.415214 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.415633 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.415645 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.415661 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.415672 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.521573 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.521652 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.521666 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.521690 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.521707 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.625927 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.625978 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.625990 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.626009 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.626023 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.668983 4935 generic.go:334] "Generic (PLEG): container finished" podID="b2a9250f-1d40-44de-ace0-dc64bc7bb803" containerID="5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95" exitCode=0 Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.669108 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" event={"ID":"b2a9250f-1d40-44de-ace0-dc64bc7bb803","Type":"ContainerDied","Data":"5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.670413 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-26rsx" event={"ID":"839b5110-76e3-4c2f-80aa-1f2c0485e231","Type":"ContainerStarted","Data":"2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.672421 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d" exitCode=0 Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.672475 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.672507 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"a113d13f0ef0e364cd3cdb1953878b38d85e136a708e3f9f696eefd4de936976"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.705029 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.718568 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.729992 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.730041 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.730053 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.730077 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.730090 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.732328 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.751628 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.761583 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.780960 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":
\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.804462 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.817284 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.831082 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.834014 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.834049 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.834058 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.834077 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.834089 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.843766 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.857984 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.870077 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.883860 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.901412 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.915109 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.937753 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.937807 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.937824 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.937845 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.937860 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:13Z","lastTransitionTime":"2025-12-01T18:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.940001 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z 
is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.965072 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.984118 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:13 crc kubenswrapper[4935]: I1201 18:30:13.999264 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.020002 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.037638 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.039947 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.040015 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.040030 4935 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.040060 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.040082 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.071773 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.100312 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc 
kubenswrapper[4935]: I1201 18:30:14.142452 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.143796 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.143843 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.143858 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.143881 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.143898 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.158929 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.175679 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.194339 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.221774 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.247058 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.247124 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.247172 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.247209 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.247227 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.349098 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.349155 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.349167 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.349184 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.349195 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.451787 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.451831 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.451843 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.451859 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.451870 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.507424 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.507462 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:14 crc kubenswrapper[4935]: E1201 18:30:14.507939 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:14 crc kubenswrapper[4935]: E1201 18:30:14.508132 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.508690 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:14 crc kubenswrapper[4935]: E1201 18:30:14.508823 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.554161 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.554200 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.554209 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.554229 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.554243 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.656825 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.656869 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.656879 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.656896 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.656905 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.678844 4935 generic.go:334] "Generic (PLEG): container finished" podID="b2a9250f-1d40-44de-ace0-dc64bc7bb803" containerID="9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177" exitCode=0 Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.678936 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" event={"ID":"b2a9250f-1d40-44de-ace0-dc64bc7bb803","Type":"ContainerDied","Data":"9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.686486 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.687648 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a" exitCode=1 Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.688506 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.688574 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.688596 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.688613 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" 
event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.688630 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.688651 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.701378 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/s
erviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.722918 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":
\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.743592 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.761869 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.761906 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.761915 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.761934 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.761953 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.766296 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.780979 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.806051 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.820549 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.832513 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.847176 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.862572 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.864675 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.864713 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.864726 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.864756 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.864778 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.881278 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-
socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.895725 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.915390 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.937057 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.967989 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.968037 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.968050 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.968072 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:14 crc kubenswrapper[4935]: I1201 18:30:14.968084 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:14Z","lastTransitionTime":"2025-12-01T18:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.019335 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-79z2s"] Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.019774 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.024348 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.024950 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.025127 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.024375 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.039628 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.051936 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.068685 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.071110 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.071225 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.071285 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.071361 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.071421 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:15Z","lastTransitionTime":"2025-12-01T18:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.085856 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.098212 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.115025 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.119646 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7b7c2198-c4bc-4944-948b-7e12d14f0e53-host\") pod \"node-ca-79z2s\" (UID: \"7b7c2198-c4bc-4944-948b-7e12d14f0e53\") " pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.119698 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg65l\" (UniqueName: \"kubernetes.io/projected/7b7c2198-c4bc-4944-948b-7e12d14f0e53-kube-api-access-kg65l\") pod \"node-ca-79z2s\" (UID: \"7b7c2198-c4bc-4944-948b-7e12d14f0e53\") " pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.119718 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/7b7c2198-c4bc-4944-948b-7e12d14f0e53-serviceca\") pod \"node-ca-79z2s\" (UID: \"7b7c2198-c4bc-4944-948b-7e12d14f0e53\") " pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.140690 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"ph
ase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.174563 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.174597 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.174613 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.174627 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.174637 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:15Z","lastTransitionTime":"2025-12-01T18:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.193894 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z 
is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.220901 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/7b7c2198-c4bc-4944-948b-7e12d14f0e53-serviceca\") pod \"node-ca-79z2s\" (UID: \"7b7c2198-c4bc-4944-948b-7e12d14f0e53\") " pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.221326 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7b7c2198-c4bc-4944-948b-7e12d14f0e53-host\") pod \"node-ca-79z2s\" (UID: \"7b7c2198-c4bc-4944-948b-7e12d14f0e53\") " pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.221581 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg65l\" (UniqueName: \"kubernetes.io/projected/7b7c2198-c4bc-4944-948b-7e12d14f0e53-kube-api-access-kg65l\") pod \"node-ca-79z2s\" (UID: \"7b7c2198-c4bc-4944-948b-7e12d14f0e53\") " pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.221617 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7b7c2198-c4bc-4944-948b-7e12d14f0e53-host\") pod \"node-ca-79z2s\" (UID: \"7b7c2198-c4bc-4944-948b-7e12d14f0e53\") " pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.222077 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/7b7c2198-c4bc-4944-948b-7e12d14f0e53-serviceca\") pod \"node-ca-79z2s\" (UID: \"7b7c2198-c4bc-4944-948b-7e12d14f0e53\") " pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.240883 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.260821 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg65l\" (UniqueName: \"kubernetes.io/projected/7b7c2198-c4bc-4944-948b-7e12d14f0e53-kube-api-access-kg65l\") pod \"node-ca-79z2s\" (UID: \"7b7c2198-c4bc-4944-948b-7e12d14f0e53\") " pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.279012 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.279056 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.279068 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.279088 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.279105 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:15Z","lastTransitionTime":"2025-12-01T18:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.287495 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.324093 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.336879 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-79z2s" Dec 01 18:30:15 crc kubenswrapper[4935]: W1201 18:30:15.350291 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b7c2198_c4bc_4944_948b_7e12d14f0e53.slice/crio-04d5cec01a283c3644bf19409e870fd0ec84d86e674f020902545d42a6922aac WatchSource:0}: Error finding container 04d5cec01a283c3644bf19409e870fd0ec84d86e674f020902545d42a6922aac: Status 404 returned error can't find the container with id 04d5cec01a283c3644bf19409e870fd0ec84d86e674f020902545d42a6922aac Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.362656 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.381787 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.381829 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.381842 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.381862 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.381874 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:15Z","lastTransitionTime":"2025-12-01T18:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.402378 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.442613 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.483934 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.485181 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.485278 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.485293 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.485319 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.485330 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:15Z","lastTransitionTime":"2025-12-01T18:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.588478 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.588532 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.588545 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.588568 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.588583 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:15Z","lastTransitionTime":"2025-12-01T18:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.691223 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.691267 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.691276 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.691293 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.691304 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:15Z","lastTransitionTime":"2025-12-01T18:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.693665 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-79z2s" event={"ID":"7b7c2198-c4bc-4944-948b-7e12d14f0e53","Type":"ContainerStarted","Data":"701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.693729 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-79z2s" event={"ID":"7b7c2198-c4bc-4944-948b-7e12d14f0e53","Type":"ContainerStarted","Data":"04d5cec01a283c3644bf19409e870fd0ec84d86e674f020902545d42a6922aac"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.696957 4935 generic.go:334] "Generic (PLEG): container finished" podID="b2a9250f-1d40-44de-ace0-dc64bc7bb803" containerID="0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4" exitCode=0 Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.697018 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" event={"ID":"b2a9250f-1d40-44de-ace0-dc64bc7bb803","Type":"ContainerDied","Data":"0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.714210 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.729323 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.741607 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.759079 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.774490 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.791116 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.796694 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.796733 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.796744 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.796760 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.796773 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:15Z","lastTransitionTime":"2025-12-01T18:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.805786 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.819500 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.843267 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.884563 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.900475 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.900524 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.900535 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.900558 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.900580 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:15Z","lastTransitionTime":"2025-12-01T18:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.936659 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:15 crc kubenswrapper[4935]: I1201 18:30:15.965114 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:15Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.003819 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.004130 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.004192 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.004203 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.004224 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.004243 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.039387 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.092509 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z 
is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.108475 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.108540 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.108553 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.108577 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.108593 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.122665 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.164679 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.207884 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.212354 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.212404 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.212416 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.212434 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 
18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.212452 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.245417 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.285352 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.315888 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.315939 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.315957 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.315986 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.316011 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.336594 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z 
is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.368716 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.404129 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.419093 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.419214 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.419240 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.419275 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.419301 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.445824 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.481515 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.507143 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.507236 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:16 crc kubenswrapper[4935]: E1201 18:30:16.507308 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.507245 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:16 crc kubenswrapper[4935]: E1201 18:30:16.507384 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:16 crc kubenswrapper[4935]: E1201 18:30:16.507492 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.521982 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.524073 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.524229 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.524258 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.524303 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.524331 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.573170 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.607603 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.627660 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.627724 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.627738 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.627762 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.627777 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.641787 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.684522 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 
2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.710203 4935 generic.go:334] "Generic (PLEG): container finished" podID="b2a9250f-1d40-44de-ace0-dc64bc7bb803" containerID="8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84" exitCode=0 Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.710341 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" event={"ID":"b2a9250f-1d40-44de-ace0-dc64bc7bb803","Type":"ContainerDied","Data":"8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.724720 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.734394 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.734467 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.734484 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.734508 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.734523 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.766982 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.801745 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.839018 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.839111 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.839127 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.839185 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.839206 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.848863 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z 
is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.889275 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.920287 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.941595 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.941637 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.941650 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.941671 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.941686 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:16Z","lastTransitionTime":"2025-12-01T18:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:16 crc kubenswrapper[4935]: I1201 18:30:16.964554 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.001822 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:16Z is after 
2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.044778 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.044833 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.044847 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.044871 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.044886 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.048763 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.088873 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.124280 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.148670 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.148741 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.148757 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.148783 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.148799 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.205773 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.230847 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.252205 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.252254 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.252267 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.252287 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.252299 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.259127 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.287567 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.328135 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.355173 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.355213 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.355240 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.355257 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.355267 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.365718 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.402480 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.440924 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.457704 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.457849 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.457953 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.458188 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.458309 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.487804 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491
c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.531986 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\
"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c
6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.561170 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.561536 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.561647 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.561740 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.561832 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.565225 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.603023 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.643651 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.665513 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.665623 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.665769 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.665811 4935 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.665831 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.683264 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.725911 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.734175 4935 generic.go:334] "Generic (PLEG): container finished" podID="b2a9250f-1d40-44de-ace0-dc64bc7bb803" containerID="d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f" exitCode=0 Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.734264 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" event={"ID":"b2a9250f-1d40-44de-ace0-dc64bc7bb803","Type":"ContainerDied","Data":"d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.753700 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.754858 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.772703 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.778015 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.778058 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.778069 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.778088 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.778099 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.812730 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.847321 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.881428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.881457 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.881469 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.881486 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.881496 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.883563 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: E1201 18:30:17.897265 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.901488 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.901538 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.901554 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.901578 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.901596 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: E1201 18:30:17.917903 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.923108 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.923188 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.923206 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.923227 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.923245 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.923690 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: E1201 18:30:17.937294 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.941330 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.941373 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.941384 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.941403 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.941414 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: E1201 18:30:17.956038 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.960440 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.961066 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.961125 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.961140 4935 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.961184 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.961200 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:17 crc kubenswrapper[4935]: E1201 18:30:17.973538 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:17 crc kubenswrapper[4935]: E1201 18:30:17.973698 4935 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.976427 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.976463 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.976474 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.976493 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:17 crc kubenswrapper[4935]: I1201 18:30:17.976505 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:17Z","lastTransitionTime":"2025-12-01T18:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.001718 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1
77225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:17Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.042954 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.080497 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.080472 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.080535 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.080614 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.080636 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.080649 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:18Z","lastTransitionTime":"2025-12-01T18:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.123213 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.161652 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.183816 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.183886 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.183902 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.183945 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.183960 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:18Z","lastTransitionTime":"2025-12-01T18:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.202602 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.243449 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.282086 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.287857 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.287911 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.287922 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.287947 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.287960 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:18Z","lastTransitionTime":"2025-12-01T18:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.335350 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491
c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.379501 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\
"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c
6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.391916 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.391973 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.391992 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.392021 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.392040 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:18Z","lastTransitionTime":"2025-12-01T18:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.407059 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.450198 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.485055 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.495649 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.495731 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.495744 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.495788 4935 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.495803 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:18Z","lastTransitionTime":"2025-12-01T18:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.508419 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.508472 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:18 crc kubenswrapper[4935]: E1201 18:30:18.508594 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.508663 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:18 crc kubenswrapper[4935]: E1201 18:30:18.508971 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:18 crc kubenswrapper[4935]: E1201 18:30:18.509050 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.600191 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.600869 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.600900 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.600926 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.600939 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:18Z","lastTransitionTime":"2025-12-01T18:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.705042 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.705602 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.705745 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.705874 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.705982 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:18Z","lastTransitionTime":"2025-12-01T18:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.764965 4935 generic.go:334] "Generic (PLEG): container finished" podID="b2a9250f-1d40-44de-ace0-dc64bc7bb803" containerID="580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7" exitCode=0 Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.765019 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" event={"ID":"b2a9250f-1d40-44de-ace0-dc64bc7bb803","Type":"ContainerDied","Data":"580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.807108 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802
baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.810890 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.810932 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.810946 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.810968 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.810983 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:18Z","lastTransitionTime":"2025-12-01T18:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.829638 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.849986 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.867641 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.896444 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.914278 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.914360 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.914382 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.914418 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.914442 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:18Z","lastTransitionTime":"2025-12-01T18:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.918238 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.936630 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.949753 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.970028 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:18 crc kubenswrapper[4935]: I1201 18:30:18.983467 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.002624 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:18Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.017747 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.017805 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.017821 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.017851 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.017871 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.018363 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.042915 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.060672 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.084509 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.120721 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.120778 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.120791 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.120812 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.120825 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.223061 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.223102 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.223118 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.223138 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.223170 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.326119 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.326198 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.326215 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.326237 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.326250 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.429850 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.429899 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.429911 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.429929 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.429943 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.532216 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.532266 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.532282 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.532300 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.532310 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.636544 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.637123 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.637247 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.637287 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.637312 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.741575 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.741638 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.741656 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.741681 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.741700 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.774081 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.775173 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.775550 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.775789 4935 scope.go:117] "RemoveContainer" containerID="b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.784359 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" event={"ID":"b2a9250f-1d40-44de-ace0-dc64bc7bb803","Type":"ContainerStarted","Data":"c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.795929 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.820779 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.835414 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.845691 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.845745 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.845765 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.845795 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.845815 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.851339 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.853648 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.874085 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.889538 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.903258 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.917851 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.937718 4935 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/o
penshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ 
LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disab
led\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.948995 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.949052 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.949065 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.949086 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.949102 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:19Z","lastTransitionTime":"2025-12-01T18:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.953267 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.972573 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:19 crc kubenswrapper[4935]: I1201 18:30:19.985633 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.004798 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.017757 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.030889 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.044705 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.051609 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.051655 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.051666 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.051706 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.051738 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.059533 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.077962 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.093537 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.108688 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f
1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.122423 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.134209 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.143358 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.154482 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.154577 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.154595 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.154624 4935 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.154644 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.165129 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\
\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.193924 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.212204 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.228758 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.239691 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.252475 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.257421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.257493 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.257513 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.257559 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.257580 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.282036 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.360183 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 
18:30:20.360265 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.360285 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.360314 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.360371 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.375230 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.375424 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.375464 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.375500 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.375623 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:30:36.375521979 +0000 UTC m=+50.397151288 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.375648 4935 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.375811 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:36.375790688 +0000 UTC m=+50.397419947 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.375831 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.375678 4935 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.375927 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.375686 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.376008 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.376036 4935 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.375943 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:36.375928452 +0000 UTC m=+50.397557921 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.375950 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.376069 4935 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.376099 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:36.376085197 +0000 UTC m=+50.397714466 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.376132 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:36.376112468 +0000 UTC m=+50.397741737 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.462649 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.462729 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.462751 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.462783 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.462808 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.507630 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.507630 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.507761 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.507622 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.507895 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:20 crc kubenswrapper[4935]: E1201 18:30:20.507996 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.564820 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.564892 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.564910 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.564947 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.564968 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.668342 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.668388 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.668401 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.668428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.668440 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.771245 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.771303 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.771330 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.771357 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.771376 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.793610 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.794481 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.794665 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.794998 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.816374 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.832603 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.841024 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc
0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.861748 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.874442 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.874536 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.874561 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.874590 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.874610 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.880492 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.896828 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.921291 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\
"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.945872 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509c
cb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.960987 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.976431 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.977389 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.977445 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.977468 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.977497 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.977518 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:20Z","lastTransitionTime":"2025-12-01T18:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:20 crc kubenswrapper[4935]: I1201 18:30:20.994513 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.009631 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.025914 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.044175 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.062953 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\
\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\
\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.077084 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.080522 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.080583 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.080595 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.080614 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.080626 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:21Z","lastTransitionTime":"2025-12-01T18:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.097213 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.109609 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.131280 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f
1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.150199 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.164903 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.183801 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.183862 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.183872 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.183891 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.183903 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:21Z","lastTransitionTime":"2025-12-01T18:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.191655 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.209930 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509c
cb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.235801 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.251709 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.304300 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.304358 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.304368 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.304386 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.304399 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:21Z","lastTransitionTime":"2025-12-01T18:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.311562 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.327950 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.362575 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.401422 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.407348 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.407373 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.407383 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.407397 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.407408 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:21Z","lastTransitionTime":"2025-12-01T18:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.442823 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.479897 4935 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:21Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.509757 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.509803 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.509816 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.509837 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.509850 4935 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:21Z","lastTransitionTime":"2025-12-01T18:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.612677 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.612717 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.612730 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.612748 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.612761 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:21Z","lastTransitionTime":"2025-12-01T18:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.714930 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.714972 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.714985 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.715003 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.715016 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:21Z","lastTransitionTime":"2025-12-01T18:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.799688 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.817603 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.817650 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.817660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.817676 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.817686 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:21Z","lastTransitionTime":"2025-12-01T18:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.920717 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.920769 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.920784 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.920810 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:21 crc kubenswrapper[4935]: I1201 18:30:21.920859 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:21Z","lastTransitionTime":"2025-12-01T18:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.023739 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.023814 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.023835 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.023866 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.023893 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.126478 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.126540 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.126550 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.126566 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.126578 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.229056 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.229097 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.229107 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.229121 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.229132 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.331189 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.331235 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.331243 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.331260 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.331270 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.434109 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.434159 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.434167 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.434181 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.434189 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.508140 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.508189 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:22 crc kubenswrapper[4935]: E1201 18:30:22.508424 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.508206 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:22 crc kubenswrapper[4935]: E1201 18:30:22.508490 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:22 crc kubenswrapper[4935]: E1201 18:30:22.508692 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.536686 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.536738 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.536748 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.536766 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.536777 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.639174 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.639215 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.639224 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.639241 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.639251 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.742237 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.742806 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.742832 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.742858 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.742878 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.808878 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/0.log" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.817951 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.821649 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0" exitCode=1 Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.821752 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.823006 4935 scope.go:117] "RemoveContainer" containerID="c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.845662 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.845720 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.845743 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.845769 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.845789 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.861251 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",
\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:22Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.886721 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:22Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.909118 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:22Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.930544 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:22Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.950537 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.950609 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.950627 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.950665 4935 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.950685 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:22Z","lastTransitionTime":"2025-12-01T18:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.959567 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:21Z\\\",\\\"message\\\":\\\" 6235 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 18:30:21.793734 6235 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 18:30:21.793774 6235 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 18:30:21.793805 6235 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 18:30:21.793812 6235 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 18:30:21.793813 6235 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 18:30:21.793825 6235 handler.go:190] Sending *v1.Node event handler 2 for 
removal\\\\nI1201 18:30:21.793831 6235 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 18:30:21.793831 6235 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 18:30:21.793837 6235 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:21.793856 6235 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 18:30:21.793881 6235 factory.go:656] Stopping watch factory\\\\nI1201 18:30:21.793901 6235 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:21.793929 6235 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 18:30:21.793939 6235 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:21.793941 6235 handler.go:208] Removed *v1.EgressIP\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath
\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:22Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:22 crc kubenswrapper[4935]: I1201 18:30:22.982069 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:22Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.006912 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:23Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.029637 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:23Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.054037 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.054107 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.054132 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.054209 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.054236 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.057092 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:23Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.076384 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T18:30:23Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.098843 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:23Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.122252 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:23Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.142004 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f
1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:23Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.158877 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.158954 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.158975 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.159002 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.159023 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.164849 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:23Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.186800 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:23Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.262625 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.262699 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.262722 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.262758 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.262779 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.366769 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.366809 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.366820 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.366838 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.366850 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.469831 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.469898 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.469966 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.469996 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.470016 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.573442 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.573525 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.573549 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.573583 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.573607 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.678011 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.678072 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.678095 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.678126 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.678179 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.782976 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.783034 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.783050 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.783072 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.783084 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.887093 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.887199 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.887217 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.887246 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.887265 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.990622 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.990663 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.990685 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.990711 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:23 crc kubenswrapper[4935]: I1201 18:30:23.990732 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:23Z","lastTransitionTime":"2025-12-01T18:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.079411 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459"] Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.080120 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.084599 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.084941 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.094023 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.094066 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.094084 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.094107 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.094123 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:24Z","lastTransitionTime":"2025-12-01T18:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.106373 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.123858 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/adc07034-9cd3-4982-bce2-91a840220814-env-overrides\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.123942 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/adc07034-9cd3-4982-bce2-91a840220814-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.123982 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwq5x\" (UniqueName: \"kubernetes.io/projected/adc07034-9cd3-4982-bce2-91a840220814-kube-api-access-nwq5x\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.124022 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/adc07034-9cd3-4982-bce2-91a840220814-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.130243 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.147138 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.171356 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.188722 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.197308 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.197362 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.197376 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.197400 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.197414 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:24Z","lastTransitionTime":"2025-12-01T18:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.212014 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.225381 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/adc07034-9cd3-4982-bce2-91a840220814-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.225455 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/adc07034-9cd3-4982-bce2-91a840220814-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.225493 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-nwq5x\" (UniqueName: \"kubernetes.io/projected/adc07034-9cd3-4982-bce2-91a840220814-kube-api-access-nwq5x\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.225629 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/adc07034-9cd3-4982-bce2-91a840220814-env-overrides\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.226799 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/adc07034-9cd3-4982-bce2-91a840220814-env-overrides\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.226943 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/adc07034-9cd3-4982-bce2-91a840220814-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.236695 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/adc07034-9cd3-4982-bce2-91a840220814-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.248230 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.257917 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwq5x\" (UniqueName: \"kubernetes.io/projected/adc07034-9cd3-4982-bce2-91a840220814-kube-api-access-nwq5x\") pod \"ovnkube-control-plane-749d76644c-tk459\" (UID: \"adc07034-9cd3-4982-bce2-91a840220814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.271857 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.295749 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.300018 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.300067 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.300080 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.300099 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.300112 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:24Z","lastTransitionTime":"2025-12-01T18:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.324251 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.340933 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.368591 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.386921 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.402886 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.402949 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.402965 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.402986 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.403001 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:24Z","lastTransitionTime":"2025-12-01T18:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.403703 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\
\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.408227 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" Dec 01 18:30:24 crc kubenswrapper[4935]: W1201 18:30:24.422411 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podadc07034_9cd3_4982_bce2_91a840220814.slice/crio-0a37884126949e3272e67ad835305a4fc2eb01e04456dcedd87891dbf57e2131 WatchSource:0}: Error finding container 0a37884126949e3272e67ad835305a4fc2eb01e04456dcedd87891dbf57e2131: Status 404 returned error can't find the container with id 0a37884126949e3272e67ad835305a4fc2eb01e04456dcedd87891dbf57e2131 Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.422542 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.451911 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:21Z\\\",\\\"message\\\":\\\" 6235 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 18:30:21.793734 6235 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 18:30:21.793774 6235 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 18:30:21.793805 6235 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 18:30:21.793812 6235 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 18:30:21.793813 6235 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 18:30:21.793825 6235 handler.go:190] Sending *v1.Node event handler 2 for 
removal\\\\nI1201 18:30:21.793831 6235 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 18:30:21.793831 6235 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 18:30:21.793837 6235 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:21.793856 6235 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 18:30:21.793881 6235 factory.go:656] Stopping watch factory\\\\nI1201 18:30:21.793901 6235 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:21.793929 6235 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 18:30:21.793939 6235 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:21.793941 6235 handler.go:208] Removed *v1.EgressIP\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath
\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.507772 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.507848 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.507861 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.507879 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.507892 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:24Z","lastTransitionTime":"2025-12-01T18:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.508461 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.508540 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.508634 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:24 crc kubenswrapper[4935]: E1201 18:30:24.508859 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:24 crc kubenswrapper[4935]: E1201 18:30:24.508930 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:24 crc kubenswrapper[4935]: E1201 18:30:24.508786 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.602225 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.610296 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.610351 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.610369 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.610401 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.610417 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:24Z","lastTransitionTime":"2025-12-01T18:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.713076 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.713179 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.713200 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.713231 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.713253 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:24Z","lastTransitionTime":"2025-12-01T18:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.815188 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.815225 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.815235 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.815250 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.815261 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:24Z","lastTransitionTime":"2025-12-01T18:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.832758 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/0.log" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.834948 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.835751 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.836546 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.836760 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" event={"ID":"adc07034-9cd3-4982-bce2-91a840220814","Type":"ContainerStarted","Data":"0a37884126949e3272e67ad835305a4fc2eb01e04456dcedd87891dbf57e2131"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.849670 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.862297 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.876715 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.890942 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.909677 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.917860 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.917908 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.917919 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.917940 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.917952 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:24Z","lastTransitionTime":"2025-12-01T18:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.929743 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.949433 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.980702 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:24 crc kubenswrapper[4935]: I1201 18:30:24.995986 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:24Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.021245 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:21Z\\\",\\\"message\\\":\\\" 6235 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 18:30:21.793734 6235 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 18:30:21.793774 6235 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 18:30:21.793805 6235 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 18:30:21.793812 6235 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 18:30:21.793813 6235 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 18:30:21.793825 6235 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 18:30:21.793831 6235 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 18:30:21.793831 
6235 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 18:30:21.793837 6235 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:21.793856 6235 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 18:30:21.793881 6235 factory.go:656] Stopping watch factory\\\\nI1201 18:30:21.793901 6235 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:21.793929 6235 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 18:30:21.793939 6235 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:21.793941 6235 handler.go:208] Removed *v1.EgressIP\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\
\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.021398 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.021591 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.021602 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.021619 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.021636 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.041774 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.054068 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.067873 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.079245 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.095869 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.112878 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.125222 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.125284 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.125303 4935 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.125336 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.125360 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.227845 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.227896 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.227906 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.227924 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.227937 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.331205 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.331249 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.331263 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.331282 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.331295 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.433791 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.433849 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.433866 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.433892 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.433913 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.537881 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.537948 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.537973 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.538006 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.538032 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.640479 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.640847 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.641045 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.641246 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.641383 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.744873 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.744954 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.744979 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.745018 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.745043 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.852347 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/1.log" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.853044 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.853106 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.853126 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.853180 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.853200 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.853926 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/0.log" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.860225 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.862532 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.862646 4935 scope.go:117] "RemoveContainer" containerID="c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.863706 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2" exitCode=1 Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.863937 4935 scope.go:117] "RemoveContainer" containerID="d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2" Dec 01 18:30:25 crc kubenswrapper[4935]: E1201 18:30:25.864317 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.868548 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" event={"ID":"adc07034-9cd3-4982-bce2-91a840220814","Type":"ContainerStarted","Data":"f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.868607 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" event={"ID":"adc07034-9cd3-4982-bce2-91a840220814","Type":"ContainerStarted","Data":"c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.889451 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.914048 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.940335 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.951941 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-8jhtj"] Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.953022 4935 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:25 crc kubenswrapper[4935]: E1201 18:30:25.953142 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.957451 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.960503 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.960601 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.960625 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.960697 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.960720 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:25Z","lastTransitionTime":"2025-12-01T18:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:25 crc kubenswrapper[4935]: I1201 18:30:25.977919 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.000080 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:25Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.023678 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.044607 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.044748 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d9nv\" (UniqueName: \"kubernetes.io/projected/a3c94c79-953e-4cac-b6c4-e98aeef74928-kube-api-access-8d9nv\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.051426 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.068238 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.068329 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.068357 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.068396 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.068423 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:26Z","lastTransitionTime":"2025-12-01T18:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.073219 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.093921 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.117227 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.145039 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.146688 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d9nv\" (UniqueName: \"kubernetes.io/projected/a3c94c79-953e-4cac-b6c4-e98aeef74928-kube-api-access-8d9nv\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.146790 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:26 crc kubenswrapper[4935]: E1201 18:30:26.147003 4935 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:26 crc kubenswrapper[4935]: E1201 18:30:26.147105 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs podName:a3c94c79-953e-4cac-b6c4-e98aeef74928 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:26.647082954 +0000 UTC m=+40.668712223 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs") pod "network-metrics-daemon-8jhtj" (UID: "a3c94c79-953e-4cac-b6c4-e98aeef74928") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.171726 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.172387 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.172450 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.172476 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.172509 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.172535 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:26Z","lastTransitionTime":"2025-12-01T18:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.176411 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d9nv\" (UniqueName: \"kubernetes.io/projected/a3c94c79-953e-4cac-b6c4-e98aeef74928-kube-api-access-8d9nv\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.194338 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.229987 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:21Z\\\",\\\"message\\\":\\\" 6235 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 18:30:21.793734 6235 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 18:30:21.793774 6235 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 18:30:21.793805 6235 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 18:30:21.793812 6235 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 18:30:21.793813 6235 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 18:30:21.793825 6235 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 18:30:21.793831 6235 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 18:30:21.793831 
6235 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 18:30:21.793837 6235 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:21.793856 6235 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 18:30:21.793881 6235 factory.go:656] Stopping watch factory\\\\nI1201 18:30:21.793901 6235 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:21.793929 6235 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 18:30:21.793939 6235 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:21.793941 6235 handler.go:208] Removed *v1.EgressIP\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\" 6397 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:24.813902 6397 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:24.813919 6397 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 18:30:24.814104 6397 factory.go:656] Stopping watch factory\\\\nI1201 18:30:24.814340 6397 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-network-diagnostics/network-check-target for endpointslice openshift-network-diagnostics/network-check-target-zkp6h as it is not a known egress service\\\\nI1201 18:30:24.814393 6397 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.814921 6397 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.815340 6397 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.816126 6397 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:24.816245 6397 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 
18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.264117 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.276791 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.276867 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.276887 4935 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.276917 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.276941 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:26Z","lastTransitionTime":"2025-12-01T18:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.284210 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.304620 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 
18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.334993 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.358854 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.379826 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.381445 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.381511 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.381530 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.381558 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.381576 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:26Z","lastTransitionTime":"2025-12-01T18:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.403690 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.422298 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.453120 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:21Z\\\",\\\"message\\\":\\\" 6235 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 18:30:21.793734 6235 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 18:30:21.793774 6235 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 18:30:21.793805 6235 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 18:30:21.793812 6235 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 18:30:21.793813 6235 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 18:30:21.793825 6235 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 18:30:21.793831 6235 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 18:30:21.793831 
6235 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 18:30:21.793837 6235 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:21.793856 6235 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 18:30:21.793881 6235 factory.go:656] Stopping watch factory\\\\nI1201 18:30:21.793901 6235 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:21.793929 6235 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 18:30:21.793939 6235 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:21.793941 6235 handler.go:208] Removed *v1.EgressIP\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\" 6397 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:24.813902 6397 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:24.813919 6397 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 18:30:24.814104 6397 factory.go:656] Stopping watch factory\\\\nI1201 18:30:24.814340 6397 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-network-diagnostics/network-check-target for endpointslice openshift-network-diagnostics/network-check-target-zkp6h as it is not a known egress service\\\\nI1201 18:30:24.814393 6397 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.814921 6397 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.815340 6397 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.816126 6397 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:24.816245 6397 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 
18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.484855 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.484918 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.484937 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.484972 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.484992 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:26Z","lastTransitionTime":"2025-12-01T18:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.488058 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.507592 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.507656 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.507688 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:26 crc kubenswrapper[4935]: E1201 18:30:26.507807 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:26 crc kubenswrapper[4935]: E1201 18:30:26.507949 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:26 crc kubenswrapper[4935]: E1201 18:30:26.508075 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.516315 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.547729 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.563219 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.579921 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.588485 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.588547 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.588575 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.588610 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.588637 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:26Z","lastTransitionTime":"2025-12-01T18:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.599893 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.626225 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.646432 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.653634 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:26 crc kubenswrapper[4935]: E1201 18:30:26.653979 4935 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:26 crc kubenswrapper[4935]: E1201 18:30:26.654177 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs podName:a3c94c79-953e-4cac-b6c4-e98aeef74928 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:27.654102766 +0000 UTC m=+41.675732065 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs") pod "network-metrics-daemon-8jhtj" (UID: "a3c94c79-953e-4cac-b6c4-e98aeef74928") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.669485 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.688566 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.692112 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.692189 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.692201 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.692225 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.692241 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:26Z","lastTransitionTime":"2025-12-01T18:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.715865 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.736002 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 
18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.759968 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.782656 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.796853 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.796914 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.796926 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.796946 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.796960 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:26Z","lastTransitionTime":"2025-12-01T18:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.802951 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.823252 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.839254 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.872584 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c737bd24a19c504e56ea2f3f69a37dd8ceb71acc16190a3b333ccc8ae3d93df0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:21Z\\\",\\\"message\\\":\\\" 6235 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 18:30:21.793734 6235 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 18:30:21.793774 6235 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 18:30:21.793805 6235 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 18:30:21.793812 6235 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 18:30:21.793813 6235 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 18:30:21.793825 6235 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 18:30:21.793831 6235 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 18:30:21.793831 
6235 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 18:30:21.793837 6235 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:21.793856 6235 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 18:30:21.793881 6235 factory.go:656] Stopping watch factory\\\\nI1201 18:30:21.793901 6235 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:21.793929 6235 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1201 18:30:21.793939 6235 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:21.793941 6235 handler.go:208] Removed *v1.EgressIP\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\" 6397 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:24.813902 6397 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:24.813919 6397 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 18:30:24.814104 6397 factory.go:656] Stopping watch factory\\\\nI1201 18:30:24.814340 6397 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-network-diagnostics/network-check-target for endpointslice openshift-network-diagnostics/network-check-target-zkp6h as it is not a known egress service\\\\nI1201 18:30:24.814393 6397 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.814921 6397 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.815340 6397 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.816126 6397 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:24.816245 6397 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 
18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.878689 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/1.log" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.884015 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.886579 4935 scope.go:117] "RemoveContainer" containerID="d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2" Dec 01 18:30:26 crc kubenswrapper[4935]: E1201 18:30:26.886811 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.901224 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.901559 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.901742 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.901944 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.902125 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:26Z","lastTransitionTime":"2025-12-01T18:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.902716 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"cont
ainerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.921380 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.955671 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.971075 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:26 crc kubenswrapper[4935]: I1201 18:30:26.986999 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.001893 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.005025 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.005065 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.005080 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.005102 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.005117 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.016615 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.029814 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.049513 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\" 6397 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:24.813902 6397 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:24.813919 6397 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 18:30:24.814104 6397 factory.go:656] Stopping watch factory\\\\nI1201 18:30:24.814340 6397 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-network-diagnostics/network-check-target for endpointslice openshift-network-diagnostics/network-check-target-zkp6h as it is not a known egress service\\\\nI1201 18:30:24.814393 6397 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.814921 6397 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.815340 6397 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.816126 6397 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:24.816245 6397 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc3
2fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.079699 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.096101 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.107531 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.107606 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.107634 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.107665 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.107685 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.116472 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\
\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.128714 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.140754 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.154460 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.167877 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.182419 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.197073 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.211101 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.211167 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.211182 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.211205 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.211141 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.211221 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.224018 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.250695 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.265492 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f
1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.278911 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.292950 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.304010 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:27Z is after 2025-08-24T17:21:41Z" Dec 01 
18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.314274 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.314328 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.314347 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.314375 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.314394 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.417231 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.417308 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.417327 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.417358 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.417376 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.507864 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:27 crc kubenswrapper[4935]: E1201 18:30:27.508111 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.521121 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.521227 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.521248 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.521274 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.521295 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.624768 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.624843 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.624860 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.624890 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.624909 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.666611 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:27 crc kubenswrapper[4935]: E1201 18:30:27.666903 4935 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:27 crc kubenswrapper[4935]: E1201 18:30:27.667023 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs podName:a3c94c79-953e-4cac-b6c4-e98aeef74928 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:29.666988113 +0000 UTC m=+43.688617402 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs") pod "network-metrics-daemon-8jhtj" (UID: "a3c94c79-953e-4cac-b6c4-e98aeef74928") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.728422 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.728487 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.728513 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.728550 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.728577 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.832316 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.832404 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.832430 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.832463 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.832490 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.935472 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.935519 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.935528 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.935547 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:27 crc kubenswrapper[4935]: I1201 18:30:27.935560 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:27Z","lastTransitionTime":"2025-12-01T18:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.038846 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.038946 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.038964 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.038991 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.039029 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.142507 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.142581 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.142595 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.142622 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.142637 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.245813 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.245895 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.245923 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.245951 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.245970 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.267419 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.267480 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.267502 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.267532 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.267553 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: E1201 18:30:28.326807 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:28Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.332079 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.332114 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.332127 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.332163 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.332180 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: E1201 18:30:28.352353 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:28Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.357464 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.357540 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.357569 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.357604 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.357630 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: E1201 18:30:28.375449 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:28Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.380595 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.380628 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.380641 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.380662 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.380675 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: E1201 18:30:28.395352 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:28Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.403004 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.403128 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.403252 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.403784 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.403818 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: E1201 18:30:28.423231 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:28Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:28 crc kubenswrapper[4935]: E1201 18:30:28.423466 4935 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.426075 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.426108 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.426120 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.426139 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.426167 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.507696 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.507701 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:28 crc kubenswrapper[4935]: E1201 18:30:28.507901 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.507725 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:28 crc kubenswrapper[4935]: E1201 18:30:28.508024 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:28 crc kubenswrapper[4935]: E1201 18:30:28.508570 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.528776 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.528850 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.528870 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.528900 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.528955 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.632134 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.632258 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.632282 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.632318 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.632348 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.735509 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.735580 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.735602 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.735632 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.735655 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.839254 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.839317 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.839345 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.839369 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.839384 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.942202 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.942265 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.942282 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.942305 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:28 crc kubenswrapper[4935]: I1201 18:30:28.942318 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:28Z","lastTransitionTime":"2025-12-01T18:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.044757 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.044847 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.044875 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.044912 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.044937 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.148977 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.149077 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.149131 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.149215 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.149240 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.252168 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.252209 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.252220 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.252237 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.252250 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.354740 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.354795 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.354807 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.354827 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.354841 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.457966 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.458015 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.458029 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.458047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.458059 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.507592 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:29 crc kubenswrapper[4935]: E1201 18:30:29.507853 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.560485 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.560537 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.560548 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.560570 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.560585 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.663369 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.663443 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.663467 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.663500 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.663524 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.691419 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:29 crc kubenswrapper[4935]: E1201 18:30:29.691768 4935 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:29 crc kubenswrapper[4935]: E1201 18:30:29.691930 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs podName:a3c94c79-953e-4cac-b6c4-e98aeef74928 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:33.691891938 +0000 UTC m=+47.713521317 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs") pod "network-metrics-daemon-8jhtj" (UID: "a3c94c79-953e-4cac-b6c4-e98aeef74928") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.767984 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.768049 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.768073 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.768104 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.768121 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.871280 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.871349 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.871369 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.871398 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.871420 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.975030 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.975118 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.975189 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.975230 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:29 crc kubenswrapper[4935]: I1201 18:30:29.975256 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:29Z","lastTransitionTime":"2025-12-01T18:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.077556 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.077620 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.077633 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.077667 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.077678 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:30Z","lastTransitionTime":"2025-12-01T18:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.181042 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.181096 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.181109 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.181132 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.181178 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:30Z","lastTransitionTime":"2025-12-01T18:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.285467 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.285533 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.285546 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.285569 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.285584 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:30Z","lastTransitionTime":"2025-12-01T18:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.389598 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.389649 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.389665 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.389688 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.389705 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:30Z","lastTransitionTime":"2025-12-01T18:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.492835 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.492923 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.492947 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.492979 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.492998 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:30Z","lastTransitionTime":"2025-12-01T18:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.507423 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.507484 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.507530 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:30 crc kubenswrapper[4935]: E1201 18:30:30.507706 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:30 crc kubenswrapper[4935]: E1201 18:30:30.507748 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:30 crc kubenswrapper[4935]: E1201 18:30:30.507846 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.596295 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.596338 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.596346 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.596361 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.596372 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:30Z","lastTransitionTime":"2025-12-01T18:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.699466 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.699512 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.699521 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.699540 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.699553 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:30Z","lastTransitionTime":"2025-12-01T18:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.803358 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.803482 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.803500 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.803525 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.803544 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:30Z","lastTransitionTime":"2025-12-01T18:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.905610 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.905671 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.905680 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.905694 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:30 crc kubenswrapper[4935]: I1201 18:30:30.905704 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:30Z","lastTransitionTime":"2025-12-01T18:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.008359 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.008454 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.008519 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.008561 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.008580 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.111676 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.111740 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.111762 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.111787 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.111805 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.215540 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.215588 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.215614 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.215636 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.215650 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.319008 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.319054 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.319062 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.319081 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.319092 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.422729 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.422783 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.422794 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.422813 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.422826 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.508573 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:31 crc kubenswrapper[4935]: E1201 18:30:31.508784 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.525307 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.525373 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.525390 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.525416 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.525435 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.627837 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.627879 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.627898 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.627918 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.627929 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.730373 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.730421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.730430 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.730452 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.730466 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.834328 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.834409 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.834436 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.834467 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.834490 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.937482 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.937530 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.937542 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.937560 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:31 crc kubenswrapper[4935]: I1201 18:30:31.937571 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:31Z","lastTransitionTime":"2025-12-01T18:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.040177 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.040217 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.040228 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.040243 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.040254 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.143665 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.143738 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.143754 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.143782 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.143799 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.247006 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.247070 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.247082 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.247104 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.247119 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.351071 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.351121 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.351131 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.351165 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.351175 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.453053 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.453108 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.453128 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.453162 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.453173 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.507055 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.507120 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.507137 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:32 crc kubenswrapper[4935]: E1201 18:30:32.507285 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:32 crc kubenswrapper[4935]: E1201 18:30:32.507345 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:32 crc kubenswrapper[4935]: E1201 18:30:32.507472 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.555991 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.556047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.556060 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.556081 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.556096 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.659562 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.659613 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.659624 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.659640 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.659653 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.775502 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.775552 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.775561 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.775577 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.775587 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.879302 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.879362 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.879376 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.879398 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.879413 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.981957 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.982030 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.982047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.982081 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:32 crc kubenswrapper[4935]: I1201 18:30:32.982096 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:32Z","lastTransitionTime":"2025-12-01T18:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.084467 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.084529 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.084546 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.084571 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.084591 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:33Z","lastTransitionTime":"2025-12-01T18:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.187486 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.187555 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.187565 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.187584 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.187595 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:33Z","lastTransitionTime":"2025-12-01T18:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.289864 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.289941 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.289964 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.289993 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.290012 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:33Z","lastTransitionTime":"2025-12-01T18:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.392849 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.392912 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.392930 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.392957 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.392975 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:33Z","lastTransitionTime":"2025-12-01T18:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.495919 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.495993 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.496012 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.496040 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.496057 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:33Z","lastTransitionTime":"2025-12-01T18:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.506975 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:33 crc kubenswrapper[4935]: E1201 18:30:33.507184 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.599694 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.599760 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.599780 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.599806 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.599823 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:33Z","lastTransitionTime":"2025-12-01T18:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.702962 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.703017 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.703033 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.703051 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.703065 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:33Z","lastTransitionTime":"2025-12-01T18:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.785943 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:33 crc kubenswrapper[4935]: E1201 18:30:33.786115 4935 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:33 crc kubenswrapper[4935]: E1201 18:30:33.786240 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs podName:a3c94c79-953e-4cac-b6c4-e98aeef74928 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:41.78621999 +0000 UTC m=+55.807849249 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs") pod "network-metrics-daemon-8jhtj" (UID: "a3c94c79-953e-4cac-b6c4-e98aeef74928") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.806088 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.806181 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.806198 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.806224 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.806244 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:33Z","lastTransitionTime":"2025-12-01T18:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.909006 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.909071 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.909082 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.909099 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:33 crc kubenswrapper[4935]: I1201 18:30:33.909111 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:33Z","lastTransitionTime":"2025-12-01T18:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.011872 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.011922 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.011934 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.011951 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.011961 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.115897 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.115947 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.115956 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.115974 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.115986 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.218691 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.218744 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.218758 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.218784 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.218799 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.322193 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.322309 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.322329 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.322358 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.322377 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.425370 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.425443 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.425459 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.425479 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.425492 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.507321 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.507319 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:34 crc kubenswrapper[4935]: E1201 18:30:34.507474 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.507338 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:34 crc kubenswrapper[4935]: E1201 18:30:34.507641 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:34 crc kubenswrapper[4935]: E1201 18:30:34.507892 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.527812 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.527859 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.527868 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.527889 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.527902 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.631620 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.631712 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.631731 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.631760 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.631776 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.734919 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.734983 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.734998 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.735021 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.735038 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.837576 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.837665 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.837676 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.837696 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.837711 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.941485 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.941557 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.941583 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.941617 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:34 crc kubenswrapper[4935]: I1201 18:30:34.941643 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:34Z","lastTransitionTime":"2025-12-01T18:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.040455 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.044827 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.044889 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.044904 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.044928 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.044943 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.061624 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.081907 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.109397 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.135831 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.148370 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.148423 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.148433 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.148450 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.148475 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.154817 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.185820 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\" 6397 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:24.813902 6397 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:24.813919 6397 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 18:30:24.814104 6397 factory.go:656] Stopping watch factory\\\\nI1201 18:30:24.814340 6397 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-network-diagnostics/network-check-target for endpointslice openshift-network-diagnostics/network-check-target-zkp6h as it is not a known egress service\\\\nI1201 18:30:24.814393 6397 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.814921 6397 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.815340 6397 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.816126 6397 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:24.816245 6397 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc3
2fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.209104 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.229547 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.250215 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.251915 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.251971 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.251985 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.252006 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.252021 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.273861 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.288865 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.301713 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.323705 4935 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.343922 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.355961 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.356019 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.356041 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.356069 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.356090 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.364961 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.385121 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.404724 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.421512 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:35Z is after 2025-08-24T17:21:41Z" Dec 01 
18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.459932 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.460057 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.460086 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.460121 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.460175 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.507835 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:35 crc kubenswrapper[4935]: E1201 18:30:35.508044 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.563801 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.563873 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.563894 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.563921 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.563939 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.671038 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.671101 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.671118 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.671174 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.671193 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.776047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.776115 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.776127 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.776193 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.776206 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.880056 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.880195 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.880215 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.880247 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.880266 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.983347 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.983630 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.983675 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.983708 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:35 crc kubenswrapper[4935]: I1201 18:30:35.983729 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:35Z","lastTransitionTime":"2025-12-01T18:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.086550 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.086605 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.086618 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.086639 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.086653 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:36Z","lastTransitionTime":"2025-12-01T18:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.190181 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.190229 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.190244 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.190265 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.190281 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:36Z","lastTransitionTime":"2025-12-01T18:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.292749 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.292794 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.292807 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.292826 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.292835 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:36Z","lastTransitionTime":"2025-12-01T18:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.396098 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.396162 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.396176 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.396199 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.396210 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:36Z","lastTransitionTime":"2025-12-01T18:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.420985 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.421333 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:31:08.421290174 +0000 UTC m=+82.442919443 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.421447 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.421508 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.421594 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.421656 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.421776 4935 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.421900 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.421933 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.421955 4935 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.421796 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:30:36 crc 
kubenswrapper[4935]: E1201 18:30:36.421957 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:31:08.421920714 +0000 UTC m=+82.443550013 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.422033 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.422088 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 18:31:08.422046897 +0000 UTC m=+82.443676156 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.421808 4935 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.422119 4935 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.422177 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:31:08.42216686 +0000 UTC m=+82.443796119 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.422196 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 18:31:08.422184551 +0000 UTC m=+82.443813820 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.499020 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.499078 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.499093 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.499114 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.499129 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:36Z","lastTransitionTime":"2025-12-01T18:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.507402 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.507426 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.507505 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.507663 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.507775 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:36 crc kubenswrapper[4935]: E1201 18:30:36.507868 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.529897 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.547013 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.564806 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.582925 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.599008 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.602978 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.603012 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.603024 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.603245 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.603270 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:36Z","lastTransitionTime":"2025-12-01T18:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.613959 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.635094 4935 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.649458 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.669556 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f
1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.684517 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.701727 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.706053 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.706095 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.706111 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.706134 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.706164 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:36Z","lastTransitionTime":"2025-12-01T18:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.713935 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.733427 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready
\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{
\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.746345 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.761630 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 
2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.782205 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.796626 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.813897 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.813941 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.813951 4935 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.813973 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.813989 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:36Z","lastTransitionTime":"2025-12-01T18:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.819729 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\" 6397 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:24.813902 6397 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:24.813919 6397 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 18:30:24.814104 6397 factory.go:656] Stopping watch factory\\\\nI1201 18:30:24.814340 6397 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-network-diagnostics/network-check-target for endpointslice openshift-network-diagnostics/network-check-target-zkp6h as it is not a known egress service\\\\nI1201 18:30:24.814393 6397 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.814921 6397 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.815340 6397 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.816126 6397 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:24.816245 6397 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc3
2fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:36Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.916682 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.916733 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.916745 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.916763 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:36 crc kubenswrapper[4935]: I1201 18:30:36.916776 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:36Z","lastTransitionTime":"2025-12-01T18:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.018992 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.019034 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.019044 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.019062 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.019071 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.121968 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.122008 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.122017 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.122037 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.122050 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.224472 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.224528 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.224541 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.224564 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.224576 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.327600 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.327673 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.327687 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.327705 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.327717 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.430958 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.431432 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.431448 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.431470 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.431483 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.507414 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:37 crc kubenswrapper[4935]: E1201 18:30:37.507611 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.533685 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.533972 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.534037 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.534164 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.534241 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.637039 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.637664 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.637842 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.638086 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.638329 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.741758 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.741836 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.741860 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.741893 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.741916 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.844672 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.844763 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.844781 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.844811 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.844829 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.948742 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.948827 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.948846 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.948876 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:37 crc kubenswrapper[4935]: I1201 18:30:37.948900 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:37Z","lastTransitionTime":"2025-12-01T18:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.053061 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.053128 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.053180 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.053210 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.053229 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.156180 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.156234 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.156246 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.156267 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.156280 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.259900 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.260208 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.260307 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.260406 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.260498 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.363983 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.364072 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.364084 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.364105 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.364115 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.466940 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.467007 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.467018 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.467035 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.467046 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.468020 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.468053 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.468065 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.468082 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.468112 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: E1201 18:30:38.485958 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:38Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.490888 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.490958 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.490983 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.491016 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.491051 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.508590 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.508655 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:38 crc kubenswrapper[4935]: E1201 18:30:38.508761 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:38 crc kubenswrapper[4935]: E1201 18:30:38.508873 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.509144 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:38 crc kubenswrapper[4935]: E1201 18:30:38.509493 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:38 crc kubenswrapper[4935]: E1201 18:30:38.509849 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:38Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.514369 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.514414 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.514429 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.514488 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.514519 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: E1201 18:30:38.533803 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:38Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.538951 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.539001 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.539015 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.539036 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.539053 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: E1201 18:30:38.554951 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:38Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.560415 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.560638 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.561039 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.561235 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.561407 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: E1201 18:30:38.580295 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:38Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:38 crc kubenswrapper[4935]: E1201 18:30:38.580524 4935 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.582510 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.582660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.582872 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.583077 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.583392 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.687107 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.687475 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.687598 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.687670 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.687727 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.791477 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.791542 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.791562 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.791588 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.791610 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.894621 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.894685 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.894707 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.894731 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.894752 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.998553 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.998605 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.998620 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.998642 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:38 crc kubenswrapper[4935]: I1201 18:30:38.998653 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:38Z","lastTransitionTime":"2025-12-01T18:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.103579 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.103631 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.103644 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.103666 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.103680 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:39Z","lastTransitionTime":"2025-12-01T18:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.206638 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.206683 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.206700 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.206721 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.206736 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:39Z","lastTransitionTime":"2025-12-01T18:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.309988 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.310062 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.310087 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.310115 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.310139 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:39Z","lastTransitionTime":"2025-12-01T18:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.413403 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.413457 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.413469 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.413489 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.413506 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:39Z","lastTransitionTime":"2025-12-01T18:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.508000 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:39 crc kubenswrapper[4935]: E1201 18:30:39.508215 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.516769 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.516820 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.516833 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.516853 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.516867 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:39Z","lastTransitionTime":"2025-12-01T18:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.619990 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.620082 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.620119 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.620199 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.620244 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:39Z","lastTransitionTime":"2025-12-01T18:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.722997 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.723073 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.723083 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.723100 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.723176 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:39Z","lastTransitionTime":"2025-12-01T18:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.825636 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.825673 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.825685 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.825703 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.825715 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:39Z","lastTransitionTime":"2025-12-01T18:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.929712 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.929761 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.929771 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.929792 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:39 crc kubenswrapper[4935]: I1201 18:30:39.929806 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:39Z","lastTransitionTime":"2025-12-01T18:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.032725 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.032809 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.032823 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.032841 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.032855 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.136298 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.136353 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.136392 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.136431 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.136455 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.240647 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.241095 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.241466 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.241730 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.241950 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.345139 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.345201 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.345218 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.345240 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.345254 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.448128 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.448209 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.448226 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.448252 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.448269 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.508500 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:40 crc kubenswrapper[4935]: E1201 18:30:40.508781 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.508521 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:40 crc kubenswrapper[4935]: E1201 18:30:40.508915 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.508521 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:40 crc kubenswrapper[4935]: E1201 18:30:40.509034 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.551012 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.551062 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.551074 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.551097 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.551115 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.653544 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.653861 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.653993 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.654109 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.654231 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.757742 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.758029 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.758096 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.758219 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.758288 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.861638 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.861982 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.862081 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.862189 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.862275 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.965366 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.965428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.965442 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.965471 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:40 crc kubenswrapper[4935]: I1201 18:30:40.965485 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:40Z","lastTransitionTime":"2025-12-01T18:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.068392 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.068468 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.068480 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.068502 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.068516 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:41Z","lastTransitionTime":"2025-12-01T18:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.171204 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.171539 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.171630 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.171702 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.171772 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:41Z","lastTransitionTime":"2025-12-01T18:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.275011 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.275069 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.275082 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.275102 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.275114 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:41Z","lastTransitionTime":"2025-12-01T18:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.378133 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.378235 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.378247 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.378269 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.378280 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:41Z","lastTransitionTime":"2025-12-01T18:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.481507 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.481555 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.481566 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.481586 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.481599 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:41Z","lastTransitionTime":"2025-12-01T18:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.507070 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:41 crc kubenswrapper[4935]: E1201 18:30:41.507283 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.508076 4935 scope.go:117] "RemoveContainer" containerID="d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.583784 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.583834 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.583850 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.583871 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.583886 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:41Z","lastTransitionTime":"2025-12-01T18:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.687234 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.687275 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.687288 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.687308 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.687320 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:41Z","lastTransitionTime":"2025-12-01T18:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.790199 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.790236 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.790245 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.790263 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.790273 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:41Z","lastTransitionTime":"2025-12-01T18:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.886266 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:41 crc kubenswrapper[4935]: E1201 18:30:41.886569 4935 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:41 crc kubenswrapper[4935]: E1201 18:30:41.886776 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs podName:a3c94c79-953e-4cac-b6c4-e98aeef74928 nodeName:}" failed. No retries permitted until 2025-12-01 18:30:57.886705614 +0000 UTC m=+71.908334903 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs") pod "network-metrics-daemon-8jhtj" (UID: "a3c94c79-953e-4cac-b6c4-e98aeef74928") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.893240 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.893359 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.893418 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.893523 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.893618 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:41Z","lastTransitionTime":"2025-12-01T18:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.941429 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/1.log" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.944667 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.945617 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7"} Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.946229 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.963775 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:41Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.978332 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:41Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:41 crc kubenswrapper[4935]: I1201 18:30:41.993110 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:41Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.000863 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.000908 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.000928 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.000948 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.000966 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.013424 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.026688 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.038674 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.050538 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.070815 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\" 6397 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:24.813902 6397 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:24.813919 6397 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 18:30:24.814104 6397 factory.go:656] Stopping watch factory\\\\nI1201 18:30:24.814340 6397 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-network-diagnostics/network-check-target for endpointslice openshift-network-diagnostics/network-check-target-zkp6h as it is not a known egress service\\\\nI1201 18:30:24.814393 6397 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.814921 6397 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.815340 6397 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.816126 6397 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:24.816245 6397 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.092924 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.103742 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.103816 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.103831 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.103853 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.103866 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.107797 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.127812 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.148772 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.160981 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.171987 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.191039 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.204717 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.206415 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.206458 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.206471 4935 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.206492 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.206502 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.218672 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.239277 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c7432
05f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kub
e-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.310257 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.310325 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.310337 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.310355 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.310366 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.413654 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.413728 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.413741 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.413763 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.413775 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.507464 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.507579 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:42 crc kubenswrapper[4935]: E1201 18:30:42.507671 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:42 crc kubenswrapper[4935]: E1201 18:30:42.507844 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.507498 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:42 crc kubenswrapper[4935]: E1201 18:30:42.508466 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.516488 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.516551 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.516581 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.516614 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.516639 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.619933 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.619996 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.620008 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.620030 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.620040 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.723560 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.723627 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.723638 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.723662 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.723673 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.827418 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.827473 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.827486 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.827510 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.827525 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.930421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.930747 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.930762 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.930783 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.930801 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:42Z","lastTransitionTime":"2025-12-01T18:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.950527 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/2.log" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.951546 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/1.log" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.958083 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.959263 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7" exitCode=1 Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.959334 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7"} Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.959443 4935 scope.go:117] "RemoveContainer" containerID="d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.960084 4935 scope.go:117] "RemoveContainer" containerID="7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7" Dec 01 18:30:42 crc kubenswrapper[4935]: E1201 18:30:42.960408 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" Dec 01 18:30:42 crc kubenswrapper[4935]: I1201 18:30:42.977193 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:42Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.004347 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.018670 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.032325 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.034744 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.034808 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.034818 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.034837 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.034850 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.047521 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.062898 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.079333 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.096038 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.111880 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.128855 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:
24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.138482 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.138782 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.138898 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.139015 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.139121 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.146617 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.164828 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f
1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.182119 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.197833 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.210497 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.240616 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0e7cd418e16eaf1639e839f7d7f7201cb7ed86202e9f1d3072738dc8a6d8cf2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"message\\\":\\\" 6397 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 18:30:24.813902 6397 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 18:30:24.813919 6397 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 18:30:24.814104 6397 factory.go:656] Stopping watch factory\\\\nI1201 18:30:24.814340 6397 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-network-diagnostics/network-check-target for endpointslice openshift-network-diagnostics/network-check-target-zkp6h as it is not a known egress service\\\\nI1201 18:30:24.814393 6397 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.814921 6397 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.815340 6397 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:24.816126 6397 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:24.816245 6397 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:42Z\\\",\\\"message\\\":\\\" for endpointslice openshift-cluster-version/cluster-version-operator-xvdnk as it is not a known egress service\\\\nI1201 18:30:42.356513 6613 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-console/downloads for endpointslice openshift-console/downloads-mt5b4 as it is not a known egress service\\\\nI1201 18:30:42.356602 6613 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.356970 6613 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357195 6613 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357520 6613 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 18:30:42.357833 6613 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.358369 6613 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:42.358405 6613 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 
18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.242589 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.242649 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.242669 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.242700 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.242722 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.267580 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.287310 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.345322 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.345371 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.345385 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.345406 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.345424 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.448272 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.448323 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.448335 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.448358 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.448370 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.507040 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:43 crc kubenswrapper[4935]: E1201 18:30:43.507305 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.551921 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.551998 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.552014 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.552039 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.552053 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.655611 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.655697 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.655726 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.655836 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.655864 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.758651 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.758682 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.758692 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.758708 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.758720 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.860955 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.860989 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.861008 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.861025 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.861037 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.964089 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.964133 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.964169 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.964268 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.964286 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:43Z","lastTransitionTime":"2025-12-01T18:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.966227 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/2.log" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.969305 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.971084 4935 scope.go:117] "RemoveContainer" containerID="7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7" Dec 01 18:30:43 crc kubenswrapper[4935]: E1201 18:30:43.971369 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" Dec 01 18:30:43 crc kubenswrapper[4935]: I1201 18:30:43.987048 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.005402 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba
8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:43Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.024516 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.038425 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.052844 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.065100 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 
18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.067745 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.067794 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.067813 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.067835 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.067849 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.088845 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.103322 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.119874 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 
2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.133206 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.143446 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.161871 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:42Z\\\",\\\"message\\\":\\\" for endpointslice openshift-cluster-version/cluster-version-operator-xvdnk as it is not a known egress service\\\\nI1201 18:30:42.356513 6613 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-console/downloads for endpointslice openshift-console/downloads-mt5b4 as it is not a known egress service\\\\nI1201 18:30:42.356602 6613 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.356970 6613 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357195 6613 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) 
from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357520 6613 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 18:30:42.357833 6613 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.358369 6613 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:42.358405 6613 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.170755 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.170855 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.170917 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.170979 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.171033 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.179089 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.192726 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.209216 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.223814 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.235259 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.248790 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:44Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.274345 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.274387 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.274402 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.274428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.274444 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.378401 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.378498 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.378521 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.378552 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.378571 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.481478 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.481534 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.481554 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.481579 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.481597 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.508010 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.508061 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.508061 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:44 crc kubenswrapper[4935]: E1201 18:30:44.508232 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:44 crc kubenswrapper[4935]: E1201 18:30:44.508478 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:44 crc kubenswrapper[4935]: E1201 18:30:44.508511 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.584479 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.584530 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.584539 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.584558 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.584570 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.686986 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.687051 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.687069 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.687097 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.687116 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.790250 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.790307 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.790323 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.790347 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.790361 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.893771 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.893837 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.893854 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.893879 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.893895 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.996500 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.996550 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.996559 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.996576 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:44 crc kubenswrapper[4935]: I1201 18:30:44.996587 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:44Z","lastTransitionTime":"2025-12-01T18:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.100062 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.100119 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.100132 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.100174 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.100191 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:45Z","lastTransitionTime":"2025-12-01T18:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.203042 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.203096 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.203108 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.203128 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.203140 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:45Z","lastTransitionTime":"2025-12-01T18:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.306451 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.306541 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.306566 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.306596 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.306613 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:45Z","lastTransitionTime":"2025-12-01T18:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.410035 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.410101 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.410123 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.410185 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.410209 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:45Z","lastTransitionTime":"2025-12-01T18:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.507123 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:45 crc kubenswrapper[4935]: E1201 18:30:45.507418 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.513677 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.513721 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.513731 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.513747 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.513760 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:45Z","lastTransitionTime":"2025-12-01T18:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.616538 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.616592 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.616605 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.616626 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.616641 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:45Z","lastTransitionTime":"2025-12-01T18:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.719514 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.719559 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.719568 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.719586 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.719598 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:45Z","lastTransitionTime":"2025-12-01T18:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.822241 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.822300 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.822317 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.822344 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.822361 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:45Z","lastTransitionTime":"2025-12-01T18:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.924394 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.924430 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.924443 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.924464 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:45 crc kubenswrapper[4935]: I1201 18:30:45.924478 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:45Z","lastTransitionTime":"2025-12-01T18:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.028121 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.028213 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.028236 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.028298 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.028320 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.131423 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.131481 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.131491 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.131512 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.131524 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.233799 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.234189 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.234285 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.234453 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.234525 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.337391 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.337457 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.337470 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.337495 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.337522 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.440335 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.440393 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.440412 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.440433 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.440446 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.507822 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.507822 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:46 crc kubenswrapper[4935]: E1201 18:30:46.508042 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.507851 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:46 crc kubenswrapper[4935]: E1201 18:30:46.508077 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:46 crc kubenswrapper[4935]: E1201 18:30:46.508250 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.523678 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.539524 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.543302 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.543409 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.543481 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.543550 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 
18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.543628 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.554186 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.568639 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.581002 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 
18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.605060 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b19
21caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( 
retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:42Z\\\",\\\"message\\\":\\\" for endpointslice openshift-cluster-version/cluster-version-operator-xvdnk as it is not a known egress service\\\\nI1201 18:30:42.356513 6613 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-console/downloads for endpointslice openshift-console/downloads-mt5b4 as it is not a known egress service\\\\nI1201 18:30:42.356602 6613 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.356970 6613 reflector.go:311] Stopping reflector *v1.Namespace (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357195 6613 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357520 6613 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 18:30:42.357833 6613 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.358369 6613 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:42.358405 6613 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.633935 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.646646 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.646680 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.646688 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.646708 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.646638 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.646718 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.659400 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.673593 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.681578 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.691185 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.706594 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.722583 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.735214 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.749696 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.749743 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.749757 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.749778 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.749792 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.753405 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.763896 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.777618 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:46Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.851909 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.851954 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.851967 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.851986 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.851998 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.955812 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.955860 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.955876 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.955902 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:46 crc kubenswrapper[4935]: I1201 18:30:46.955919 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:46Z","lastTransitionTime":"2025-12-01T18:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.058479 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.058552 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.058570 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.059023 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.059076 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.162749 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.162806 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.162823 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.162848 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.162865 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.265499 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.265580 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.265604 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.265637 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.265661 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.367884 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.367921 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.367930 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.367946 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.367954 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.470874 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.470924 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.470933 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.470953 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.470964 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.507691 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:47 crc kubenswrapper[4935]: E1201 18:30:47.507892 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.573467 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.573516 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.573528 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.573548 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.573562 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.676372 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.676409 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.676418 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.676433 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.676444 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.779347 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.779402 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.779415 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.779438 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.779452 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.882818 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.882938 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.883003 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.883039 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.883098 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.986335 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.986431 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.986451 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.986477 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:47 crc kubenswrapper[4935]: I1201 18:30:47.986527 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:47Z","lastTransitionTime":"2025-12-01T18:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.089834 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.089926 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.089951 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.089987 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.090012 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.192700 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.192783 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.192808 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.192840 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.192868 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.297005 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.297083 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.297101 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.297127 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.297186 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.400428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.400477 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.400495 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.400523 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.400540 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.503925 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.503969 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.503979 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.503999 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.504009 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.507196 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:48 crc kubenswrapper[4935]: E1201 18:30:48.507303 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.507456 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.507497 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:48 crc kubenswrapper[4935]: E1201 18:30:48.507548 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:48 crc kubenswrapper[4935]: E1201 18:30:48.507719 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.612745 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.612855 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.612874 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.612901 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.612946 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.715898 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.715934 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.715944 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.715961 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.715970 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.796280 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.796351 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.796369 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.796396 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.796412 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: E1201 18:30:48.814401 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:48Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.819217 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.819446 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.819667 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.819911 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.820076 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: E1201 18:30:48.838767 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:48Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.846356 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.846460 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.846481 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.846509 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.846532 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: E1201 18:30:48.861204 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:48Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.865072 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.865260 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.865385 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.865520 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.865688 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: E1201 18:30:48.881479 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:48Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.885582 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.885784 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.885920 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.886054 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.886187 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:48 crc kubenswrapper[4935]: E1201 18:30:48.898465 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:48Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:48 crc kubenswrapper[4935]: E1201 18:30:48.898597 4935 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.900241 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.900284 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.900302 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.900320 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:48 crc kubenswrapper[4935]: I1201 18:30:48.900330 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:48Z","lastTransitionTime":"2025-12-01T18:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.002889 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.003223 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.003298 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.003373 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.003430 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.105436 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.105487 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.105498 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.105515 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.105526 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.208272 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.208337 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.208354 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.208379 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.208400 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.311694 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.311740 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.311749 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.311768 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.311779 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.414431 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.414483 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.414495 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.414516 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.414565 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.507808 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:49 crc kubenswrapper[4935]: E1201 18:30:49.508014 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.517374 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.517603 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.517899 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.518348 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.518511 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.621806 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.622237 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.622371 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.622526 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.622660 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.726097 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.727199 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.727453 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.727666 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.727909 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.831293 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.831739 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.832522 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.832674 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.832808 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.936165 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.936495 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.936587 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.936681 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:49 crc kubenswrapper[4935]: I1201 18:30:49.936768 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:49Z","lastTransitionTime":"2025-12-01T18:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.039754 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.039798 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.039809 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.039826 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.039836 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.142370 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.142695 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.142814 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.142930 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.143051 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.249244 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.249300 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.249313 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.249337 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.249352 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.352927 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.352988 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.352997 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.353019 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.353029 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.457087 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.457131 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.457163 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.457183 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.457197 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.507634 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.507709 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:50 crc kubenswrapper[4935]: E1201 18:30:50.507819 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:50 crc kubenswrapper[4935]: E1201 18:30:50.507936 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.508073 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:50 crc kubenswrapper[4935]: E1201 18:30:50.508187 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.560297 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.560370 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.560380 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.560400 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.560414 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.664348 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.664412 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.664425 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.664448 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.664463 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.772874 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.772926 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.772943 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.773016 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.773028 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.876922 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.876981 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.876993 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.877016 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.877028 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.979956 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.980020 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.980038 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.980064 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:50 crc kubenswrapper[4935]: I1201 18:30:50.980084 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:50Z","lastTransitionTime":"2025-12-01T18:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.082634 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.082717 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.082729 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.082749 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.082760 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:51Z","lastTransitionTime":"2025-12-01T18:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.185759 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.185807 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.185818 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.185842 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.185856 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:51Z","lastTransitionTime":"2025-12-01T18:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.288748 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.289213 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.289300 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.289383 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.289448 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:51Z","lastTransitionTime":"2025-12-01T18:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.392269 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.392314 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.392322 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.392342 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.392353 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:51Z","lastTransitionTime":"2025-12-01T18:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.494546 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.495090 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.495310 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.495476 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.495654 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:51Z","lastTransitionTime":"2025-12-01T18:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.507761 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:51 crc kubenswrapper[4935]: E1201 18:30:51.507928 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.598584 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.598638 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.598651 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.598683 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.598697 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:51Z","lastTransitionTime":"2025-12-01T18:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.701535 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.701597 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.701608 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.701632 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.701644 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:51Z","lastTransitionTime":"2025-12-01T18:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.805362 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.805483 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.805509 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.805537 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.805584 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:51Z","lastTransitionTime":"2025-12-01T18:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.908228 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.908281 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.908291 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.908310 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:51 crc kubenswrapper[4935]: I1201 18:30:51.908323 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:51Z","lastTransitionTime":"2025-12-01T18:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.012382 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.012437 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.012454 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.012485 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.012501 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.115677 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.115734 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.115747 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.115770 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.115786 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.218966 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.219020 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.219036 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.219054 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.219065 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.321660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.321704 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.321714 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.321734 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.321745 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.424683 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.424731 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.424742 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.424761 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.424772 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.507905 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.508033 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:52 crc kubenswrapper[4935]: E1201 18:30:52.508097 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.507914 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:52 crc kubenswrapper[4935]: E1201 18:30:52.508318 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:52 crc kubenswrapper[4935]: E1201 18:30:52.508314 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.528110 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.528179 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.528188 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.528207 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.528220 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.631638 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.631691 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.631701 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.631726 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.631740 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.734341 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.734376 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.734383 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.734400 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.734410 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.836657 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.836689 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.836701 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.836718 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.836726 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.938612 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.938686 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.938707 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.938749 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:52 crc kubenswrapper[4935]: I1201 18:30:52.938769 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:52Z","lastTransitionTime":"2025-12-01T18:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.041279 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.041326 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.041338 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.041358 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.041370 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.144641 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.144690 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.144703 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.144719 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.144732 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.252361 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.252412 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.252421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.252439 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.252450 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.354676 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.354714 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.354724 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.354742 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.354753 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.457478 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.457545 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.457557 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.457573 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.457583 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.507663 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:53 crc kubenswrapper[4935]: E1201 18:30:53.507903 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.559803 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.559853 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.559887 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.559909 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.559923 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.663099 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.663166 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.663177 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.663196 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.663208 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.766070 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.766111 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.766120 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.766137 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.766162 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.869226 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.869284 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.869296 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.869322 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.869336 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.971801 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.971849 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.971862 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.971883 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:53 crc kubenswrapper[4935]: I1201 18:30:53.971894 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:53Z","lastTransitionTime":"2025-12-01T18:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.075087 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.075136 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.075165 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.075186 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.075198 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:54Z","lastTransitionTime":"2025-12-01T18:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.178513 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.178568 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.178580 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.178600 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.178614 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:54Z","lastTransitionTime":"2025-12-01T18:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.281664 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.281729 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.281742 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.281766 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.281780 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:54Z","lastTransitionTime":"2025-12-01T18:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.384455 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.384532 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.384545 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.384567 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.384580 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:54Z","lastTransitionTime":"2025-12-01T18:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.487645 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.487694 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.487703 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.487723 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.487733 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:54Z","lastTransitionTime":"2025-12-01T18:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.507339 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.507424 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.507473 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:54 crc kubenswrapper[4935]: E1201 18:30:54.507518 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:54 crc kubenswrapper[4935]: E1201 18:30:54.507653 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:54 crc kubenswrapper[4935]: E1201 18:30:54.507732 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.590527 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.590567 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.590577 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.590595 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.590605 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:54Z","lastTransitionTime":"2025-12-01T18:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.693179 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.693222 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.693233 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.693255 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.693275 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:54Z","lastTransitionTime":"2025-12-01T18:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.796693 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.796781 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.796801 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.796832 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.796855 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:54Z","lastTransitionTime":"2025-12-01T18:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.899940 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.900000 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.900018 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.900048 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:54 crc kubenswrapper[4935]: I1201 18:30:54.900066 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:54Z","lastTransitionTime":"2025-12-01T18:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.003028 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.003076 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.003086 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.003106 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.003116 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.105699 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.105752 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.105765 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.105782 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.105794 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.208459 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.208497 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.208508 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.208528 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.208538 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.311295 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.311341 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.311351 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.311368 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.311377 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.414328 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.414383 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.414401 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.414426 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.414445 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.507742 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:55 crc kubenswrapper[4935]: E1201 18:30:55.507944 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.516853 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.516903 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.516913 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.516932 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.516945 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.619767 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.619830 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.619864 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.619883 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.619895 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.728016 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.728082 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.728097 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.728121 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.728135 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.831410 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.831458 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.831472 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.831493 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.831508 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.935231 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.935308 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.935325 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.935355 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:55 crc kubenswrapper[4935]: I1201 18:30:55.935378 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:55Z","lastTransitionTime":"2025-12-01T18:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.038386 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.038442 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.038451 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.038476 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.038489 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.141141 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.141212 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.141225 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.141246 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.141278 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.246029 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.246069 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.246082 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.246100 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.246112 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.349251 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.349301 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.349316 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.349336 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.349352 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.451644 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.451696 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.451706 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.451722 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.451733 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.507495 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.507530 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.507537 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:56 crc kubenswrapper[4935]: E1201 18:30:56.507677 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:56 crc kubenswrapper[4935]: E1201 18:30:56.507779 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:56 crc kubenswrapper[4935]: E1201 18:30:56.507880 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.522664 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.538200 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.551318 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 
18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.554380 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.554421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.554431 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.554448 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.554458 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.564475 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.577695 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.591921 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.607294 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.619900 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.640740 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:42Z\\\",\\\"message\\\":\\\" for endpointslice openshift-cluster-version/cluster-version-operator-xvdnk as it is not a known egress service\\\\nI1201 18:30:42.356513 6613 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-console/downloads for endpointslice openshift-console/downloads-mt5b4 as it is not a known egress service\\\\nI1201 18:30:42.356602 6613 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.356970 6613 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357195 6613 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) 
from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357520 6613 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 18:30:42.357833 6613 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.358369 6613 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:42.358405 6613 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.656903 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.656957 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.656967 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.656985 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.656999 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.664055 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\
"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.681953 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.717994 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 
2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.744594 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.759057 4935 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.759559 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.759587 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.759599 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.759615 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.759625 4935 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.772548 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.787401 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.807444 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.824008 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:56Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.862553 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.862600 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.862614 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.862639 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.862655 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.964958 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.964999 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.965009 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.965026 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:56 crc kubenswrapper[4935]: I1201 18:30:56.965038 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:56Z","lastTransitionTime":"2025-12-01T18:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.068111 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.068206 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.068228 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.068261 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.068284 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.171120 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.171199 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.171208 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.171223 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.171233 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.274357 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.274427 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.274441 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.274463 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.274477 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.378451 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.378505 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.378516 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.378536 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.378548 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.481476 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.481505 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.481516 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.481529 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.481537 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.507456 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:57 crc kubenswrapper[4935]: E1201 18:30:57.507871 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.584712 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.584766 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.584776 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.584795 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.584807 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.688237 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.688318 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.688336 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.688367 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.688386 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.792117 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.792188 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.792202 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.792221 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.792236 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.896412 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.896475 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.896484 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.896502 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.896512 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.960924 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:57 crc kubenswrapper[4935]: E1201 18:30:57.961313 4935 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:57 crc kubenswrapper[4935]: E1201 18:30:57.961582 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs podName:a3c94c79-953e-4cac-b6c4-e98aeef74928 nodeName:}" failed. No retries permitted until 2025-12-01 18:31:29.96140531 +0000 UTC m=+103.983034609 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs") pod "network-metrics-daemon-8jhtj" (UID: "a3c94c79-953e-4cac-b6c4-e98aeef74928") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.999525 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.999567 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.999578 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.999592 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:57 crc kubenswrapper[4935]: I1201 18:30:57.999604 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:57Z","lastTransitionTime":"2025-12-01T18:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.102133 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.102190 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.102206 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.102222 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.102231 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.204999 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.205052 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.205071 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.205091 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.205104 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.308117 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.308229 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.308252 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.308283 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.308305 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.411248 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.411297 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.411307 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.411324 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.411336 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.507630 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.507698 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.507630 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:30:58 crc kubenswrapper[4935]: E1201 18:30:58.507771 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:30:58 crc kubenswrapper[4935]: E1201 18:30:58.507879 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:30:58 crc kubenswrapper[4935]: E1201 18:30:58.508031 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.513677 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.513727 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.513739 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.513753 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.513763 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.616805 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.616857 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.616870 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.616887 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.616904 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.719563 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.719617 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.719640 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.719662 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.719678 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.821835 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.821885 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.821894 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.821911 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.821923 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.924581 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.924639 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.924693 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.924719 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.924773 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.994946 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.994994 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.995007 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.995026 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:58 crc kubenswrapper[4935]: I1201 18:30:58.995038 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:58Z","lastTransitionTime":"2025-12-01T18:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: E1201 18:30:59.008340 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.012470 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.012521 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.012535 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.012560 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.012572 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.021924 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzx4x_3f7b45c6-7cf7-420d-afb3-ea00b791af58/kube-multus/0.log" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.021993 4935 generic.go:334] "Generic (PLEG): container finished" podID="3f7b45c6-7cf7-420d-afb3-ea00b791af58" containerID="b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d" exitCode=1 Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.022035 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzx4x" event={"ID":"3f7b45c6-7cf7-420d-afb3-ea00b791af58","Type":"ContainerDied","Data":"b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.022586 4935 scope.go:117] "RemoveContainer" containerID="b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d" Dec 01 18:30:59 crc kubenswrapper[4935]: E1201 18:30:59.025187 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4
a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.028687 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.028722 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.028737 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.028763 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.028777 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.037697 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: E1201 18:30:59.039883 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet 
has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"]
,\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUU
ID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.050010 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.050051 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.050061 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.050079 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.050089 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.055905 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: E1201 18:30:59.061801 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.065031 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.065063 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.065071 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.065086 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.065095 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.067881 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: E1201 18:30:59.076695 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5
ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: E1201 18:30:59.076850 4935 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.078822 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.078855 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.078867 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.078884 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.078897 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.079244 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.091331 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 
18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.111331 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.123186 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.136655 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 
2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.154437 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"2025-12-01T18:30:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94\\\\n2025-12-01T18:30:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94 to /host/opt/cni/bin/\\\\n2025-12-01T18:30:13Z [verbose] multus-daemon started\\\\n2025-12-01T18:30:13Z [verbose] Readiness Indicator file check\\\\n2025-12-01T18:30:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.167283 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.181169 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.181217 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.181225 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.181243 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.181254 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.187263 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname 
/var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:42Z\\\",\\\"message\\\":\\\" for endpointslice openshift-cluster-version/cluster-version-operator-xvdnk as it is not a known egress service\\\\nI1201 18:30:42.356513 6613 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-console/downloads for endpointslice openshift-console/downloads-mt5b4 as it is not a known egress service\\\\nI1201 18:30:42.356602 6613 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.356970 6613 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357195 6613 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357520 6613 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 18:30:42.357833 6613 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.358369 6613 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:42.358405 6613 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.237599 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.252723 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.265512 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.280630 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.283447 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.283606 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.283721 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.283851 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.283973 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.290917 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.302294 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.314933 4935 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:30:59Z is after 2025-08-24T17:21:41Z" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.387525 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.387578 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.387590 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.387608 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.387622 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.490421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.490608 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.490701 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.490826 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.490912 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.507918 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:30:59 crc kubenswrapper[4935]: E1201 18:30:59.508109 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.509349 4935 scope.go:117] "RemoveContainer" containerID="7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7" Dec 01 18:30:59 crc kubenswrapper[4935]: E1201 18:30:59.509637 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.594778 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.594866 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.594887 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.594909 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.594921 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.697730 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.697788 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.697796 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.697813 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.697824 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.801104 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.801141 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.801159 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.801174 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.801183 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.904613 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.904681 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.904700 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.904814 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:30:59 crc kubenswrapper[4935]: I1201 18:30:59.904868 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:30:59Z","lastTransitionTime":"2025-12-01T18:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.007928 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.007969 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.007977 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.007993 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.008003 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.027725 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzx4x_3f7b45c6-7cf7-420d-afb3-ea00b791af58/kube-multus/0.log" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.027792 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzx4x" event={"ID":"3f7b45c6-7cf7-420d-afb3-ea00b791af58","Type":"ContainerStarted","Data":"5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.045562 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.067182 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.086769 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.104189 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\
\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\
\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.110004 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.110061 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.110074 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.110094 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.110110 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.116975 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.132454 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.152831 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.171783 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.190488 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f
1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.204966 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.212594 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.212621 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.212629 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.212644 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.212653 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.221629 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.235292 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 
18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.265092 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.284999 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.303868 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 
2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.314548 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.314577 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.314590 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.314607 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.314616 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.321478 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"2025-12-01T18:30:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94\\\\n2025-12-01T18:30:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94 to /host/opt/cni/bin/\\\\n2025-12-01T18:30:13Z [verbose] multus-daemon started\\\\n2025-12-01T18:30:13Z [verbose] Readiness Indicator file check\\\\n2025-12-01T18:30:58Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.337975 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.365952 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:42Z\\\",\\\"message\\\":\\\" for endpointslice openshift-cluster-version/cluster-version-operator-xvdnk as it is not a known egress service\\\\nI1201 18:30:42.356513 6613 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-console/downloads for endpointslice openshift-console/downloads-mt5b4 as it is not a known egress service\\\\nI1201 18:30:42.356602 6613 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.356970 6613 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357195 6613 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) 
from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357520 6613 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 18:30:42.357833 6613 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.358369 6613 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:42.358405 6613 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:00Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.417959 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.418022 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.418036 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.418057 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.418072 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.507623 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.507702 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.507735 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:00 crc kubenswrapper[4935]: E1201 18:31:00.507789 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:00 crc kubenswrapper[4935]: E1201 18:31:00.507899 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:00 crc kubenswrapper[4935]: E1201 18:31:00.508106 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.520466 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.520516 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.520525 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.520545 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.520556 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.623905 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.623988 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.623998 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.624037 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.624048 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.727222 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.727284 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.727296 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.727323 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.727340 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.830004 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.830066 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.830076 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.830093 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.830104 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.933772 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.933839 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.933859 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.933886 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:00 crc kubenswrapper[4935]: I1201 18:31:00.933907 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:00Z","lastTransitionTime":"2025-12-01T18:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.036272 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.036334 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.036358 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.036375 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.036387 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.139823 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.139884 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.139897 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.139917 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.139932 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.242896 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.242942 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.242952 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.242970 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.242985 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.346483 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.346568 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.346594 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.346630 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.346651 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.449501 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.449591 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.449613 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.449648 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.449670 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.507722 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:01 crc kubenswrapper[4935]: E1201 18:31:01.507946 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.553116 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.553179 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.553188 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.553206 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.553216 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.656460 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.656520 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.656533 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.656558 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.656572 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.759493 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.759563 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.759584 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.759610 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.759631 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.863310 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.863386 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.863409 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.863436 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.863456 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.967001 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.967065 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.967087 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.967118 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:01 crc kubenswrapper[4935]: I1201 18:31:01.967141 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:01Z","lastTransitionTime":"2025-12-01T18:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.070294 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.070357 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.070380 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.070412 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.070432 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:02Z","lastTransitionTime":"2025-12-01T18:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.174316 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.174421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.174449 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.174486 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.174521 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:02Z","lastTransitionTime":"2025-12-01T18:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.277900 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.277949 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.277961 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.277985 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.278000 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:02Z","lastTransitionTime":"2025-12-01T18:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.381821 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.381902 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.381923 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.381956 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.381981 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:02Z","lastTransitionTime":"2025-12-01T18:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.485715 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.485769 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.485787 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.485814 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.485831 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:02Z","lastTransitionTime":"2025-12-01T18:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.507576 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:02 crc kubenswrapper[4935]: E1201 18:31:02.507787 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.507582 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:02 crc kubenswrapper[4935]: E1201 18:31:02.507920 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.507982 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:02 crc kubenswrapper[4935]: E1201 18:31:02.508408 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.590095 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.590204 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.590246 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.590284 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.590310 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:02Z","lastTransitionTime":"2025-12-01T18:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.694452 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.694531 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.694544 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.694602 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.694615 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:02Z","lastTransitionTime":"2025-12-01T18:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.804726 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.804784 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.804795 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.804814 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.804829 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:02Z","lastTransitionTime":"2025-12-01T18:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.908649 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.908715 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.908733 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.908761 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:02 crc kubenswrapper[4935]: I1201 18:31:02.908781 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:02Z","lastTransitionTime":"2025-12-01T18:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.013175 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.013269 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.013384 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.013483 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.013533 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.117781 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.117884 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.117904 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.117931 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.117949 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.221405 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.221458 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.221468 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.221487 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.221504 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.325174 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.325247 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.325260 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.325282 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.325297 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.428594 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.428680 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.428699 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.428729 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.428797 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.507764 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:03 crc kubenswrapper[4935]: E1201 18:31:03.508054 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.526635 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.531959 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.532039 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.532060 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.532095 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.532120 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.634895 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.634981 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.635002 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.635042 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.635066 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.738248 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.738322 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.738342 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.738371 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.738390 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.841873 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.841938 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.841955 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.841974 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.841986 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.944624 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.944693 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.944717 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.944752 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:03 crc kubenswrapper[4935]: I1201 18:31:03.944778 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:03Z","lastTransitionTime":"2025-12-01T18:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.047399 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.047467 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.047490 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.047519 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.047537 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.150925 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.151004 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.151025 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.151055 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.151076 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.254878 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.254956 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.254982 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.255015 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.255039 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.358763 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.358851 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.358897 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.358933 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.358958 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.466790 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.466857 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.466871 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.466893 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.466911 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.507910 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.508001 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:04 crc kubenswrapper[4935]: E1201 18:31:04.508091 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.508183 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:04 crc kubenswrapper[4935]: E1201 18:31:04.508250 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:04 crc kubenswrapper[4935]: E1201 18:31:04.508427 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.570668 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.570761 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.570836 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.570869 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.570890 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.674499 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.674573 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.674593 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.674621 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.674642 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.778870 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.778945 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.778965 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.778994 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.779012 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.883256 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.883332 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.883353 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.883382 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.883400 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.987269 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.987341 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.987358 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.987381 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:04 crc kubenswrapper[4935]: I1201 18:31:04.987398 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:04Z","lastTransitionTime":"2025-12-01T18:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.090628 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.090689 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.090705 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.090730 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.090744 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:05Z","lastTransitionTime":"2025-12-01T18:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.194447 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.194538 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.194553 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.194574 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.194613 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:05Z","lastTransitionTime":"2025-12-01T18:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.297161 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.297209 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.297223 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.297242 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.297256 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:05Z","lastTransitionTime":"2025-12-01T18:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.400132 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.400189 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.400202 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.400219 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.400230 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:05Z","lastTransitionTime":"2025-12-01T18:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.503370 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.503424 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.503437 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.503455 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.503469 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:05Z","lastTransitionTime":"2025-12-01T18:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.508231 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:05 crc kubenswrapper[4935]: E1201 18:31:05.508466 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.606782 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.606831 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.606843 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.606861 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.606874 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:05Z","lastTransitionTime":"2025-12-01T18:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.709219 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.709292 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.709313 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.709339 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.709357 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:05Z","lastTransitionTime":"2025-12-01T18:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.812128 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.812243 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.812262 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.812290 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.812315 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:05Z","lastTransitionTime":"2025-12-01T18:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.915114 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.915223 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.915254 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.915290 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:05 crc kubenswrapper[4935]: I1201 18:31:05.915319 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:05Z","lastTransitionTime":"2025-12-01T18:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.018407 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.018478 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.018506 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.018539 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.018565 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.122502 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.122567 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.122585 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.122613 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.122632 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.226886 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.226959 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.226979 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.227009 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.227030 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.330441 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.330507 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.330527 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.330557 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.330576 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.433496 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.433566 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.433584 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.433614 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.433633 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.508056 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.508085 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:06 crc kubenswrapper[4935]: E1201 18:31:06.508967 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.508122 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:06 crc kubenswrapper[4935]: E1201 18:31:06.509046 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:06 crc kubenswrapper[4935]: E1201 18:31:06.509378 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.531187 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.542780 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.542860 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.543766 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.543834 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.543852 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.556666 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.580982 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCo
unt\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.600369 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.624711 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.643412 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"2025-12-01T18:30:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94\\\\n2025-12-01T18:30:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94 to /host/opt/cni/bin/\\\\n2025-12-01T18:30:13Z [verbose] multus-daemon started\\\\n2025-12-01T18:30:13Z [verbose] Readiness Indicator file check\\\\n2025-12-01T18:30:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.647665 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.647695 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.647704 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.647724 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.647737 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.663022 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.688529 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:42Z\\\",\\\"message\\\":\\\" for endpointslice openshift-cluster-version/cluster-version-operator-xvdnk as it is not a known egress service\\\\nI1201 18:30:42.356513 6613 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-console/downloads for endpointslice openshift-console/downloads-mt5b4 as it is not a known egress service\\\\nI1201 18:30:42.356602 6613 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.356970 6613 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357195 6613 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) 
from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357520 6613 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 18:30:42.357833 6613 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.358369 6613 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:42.358405 6613 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.725993 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.744977 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.750552 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.750627 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.750651 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 
18:31:06.750682 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.750702 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.770895 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.793977 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.811362 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.831007 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.844889 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.854715 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.854780 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.854796 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.854821 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.854840 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.864671 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.878798 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.892457 4935 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"974b1351-9adb-4f0f-88e6-fd0293f343bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://164dfe29cf41ca1bd53f96a771d1f3f816d690a08586e1e7d71272583a16e348\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.909547 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:06Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.957098 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.957253 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.957274 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.957305 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:06 crc kubenswrapper[4935]: I1201 18:31:06.957324 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:06Z","lastTransitionTime":"2025-12-01T18:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.060118 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.060184 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.060194 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.060210 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.060221 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.163511 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.163606 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.163629 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.163672 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.163698 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.266859 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.266921 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.266933 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.266957 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.267017 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.370993 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.371058 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.371074 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.371102 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.371123 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.474963 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.475047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.475075 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.475113 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.475136 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.507254 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:07 crc kubenswrapper[4935]: E1201 18:31:07.507506 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.578031 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.578121 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.578140 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.578227 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.578248 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.681475 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.681543 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.681558 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.681581 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.681622 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.785269 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.785386 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.785398 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.785417 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.785430 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.889724 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.889781 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.889794 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.889817 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.889832 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.993072 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.993138 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.993190 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.993224 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:07 crc kubenswrapper[4935]: I1201 18:31:07.993249 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:07Z","lastTransitionTime":"2025-12-01T18:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.096741 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.096792 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.096800 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.096815 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.096825 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:08Z","lastTransitionTime":"2025-12-01T18:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.200374 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.200454 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.200473 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.200505 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.200526 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:08Z","lastTransitionTime":"2025-12-01T18:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.303465 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.303535 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.303552 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.303593 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.303611 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:08Z","lastTransitionTime":"2025-12-01T18:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.406439 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.406482 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.406490 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.406506 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.406515 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:08Z","lastTransitionTime":"2025-12-01T18:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.500273 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.500465 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.500505 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.500552 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.500580 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.500744 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.500766 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.500781 4935 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.500841 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 18:32:12.500824434 +0000 UTC m=+146.522453693 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.500917 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:12.500907897 +0000 UTC m=+146.522537166 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.500976 4935 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.501006 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:32:12.50099743 +0000 UTC m=+146.522626689 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.501066 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.501080 4935 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.501091 4935 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.501120 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 18:32:12.501111253 +0000 UTC m=+146.522740522 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.501178 4935 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.501207 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 18:32:12.501198506 +0000 UTC m=+146.522827765 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.507268 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.507452 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.507465 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.507527 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.507676 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:08 crc kubenswrapper[4935]: E1201 18:31:08.507753 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.509584 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.509634 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.509651 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.509676 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.509696 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:08Z","lastTransitionTime":"2025-12-01T18:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.612998 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.613043 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.613057 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.613076 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.613090 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:08Z","lastTransitionTime":"2025-12-01T18:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.716699 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.716753 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.716770 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.716795 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.716813 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:08Z","lastTransitionTime":"2025-12-01T18:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.820072 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.820127 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.820141 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.820186 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.820202 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:08Z","lastTransitionTime":"2025-12-01T18:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.923249 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.923301 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.923312 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.923329 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:08 crc kubenswrapper[4935]: I1201 18:31:08.923338 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:08Z","lastTransitionTime":"2025-12-01T18:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.026440 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.026516 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.026539 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.026571 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.026591 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.129841 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.129901 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.129917 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.129944 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.129964 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.233186 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.233251 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.233269 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.233294 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.233308 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.336320 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.336366 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.336375 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.336391 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.336400 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.434686 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.434752 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.434793 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.434826 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.434849 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: E1201 18:31:09.459265 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:09Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.465183 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.465327 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.465359 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.465393 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.465423 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: E1201 18:31:09.486416 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:09Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.491279 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.491338 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.491351 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.491375 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.491390 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.507328 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:09 crc kubenswrapper[4935]: E1201 18:31:09.507333 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:09Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:09 crc kubenswrapper[4935]: E1201 18:31:09.507602 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.511895 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.511958 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.511973 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.511997 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.512011 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: E1201 18:31:09.524554 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:09Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.530810 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.530878 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.530892 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.530914 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.530928 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: E1201 18:31:09.546607 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:09Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:09 crc kubenswrapper[4935]: E1201 18:31:09.546821 4935 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.548807 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.548859 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.548870 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.548891 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.548904 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.658767 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.658872 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.658902 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.658941 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.658979 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.762946 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.763025 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.763044 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.763072 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.763093 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.866977 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.867063 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.867081 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.867108 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.867126 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.970692 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.970765 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.970791 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.970827 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:09 crc kubenswrapper[4935]: I1201 18:31:09.970849 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:09Z","lastTransitionTime":"2025-12-01T18:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.073724 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.073789 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.073805 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.073828 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.073844 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:10Z","lastTransitionTime":"2025-12-01T18:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.177985 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.178055 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.178073 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.178104 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.178126 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:10Z","lastTransitionTime":"2025-12-01T18:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.281805 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.281866 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.281878 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.281900 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.281913 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:10Z","lastTransitionTime":"2025-12-01T18:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.384435 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.384506 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.384523 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.384548 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.384566 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:10Z","lastTransitionTime":"2025-12-01T18:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.487415 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.487532 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.487546 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.487568 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.487582 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:10Z","lastTransitionTime":"2025-12-01T18:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.507737 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.507832 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:10 crc kubenswrapper[4935]: E1201 18:31:10.508212 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:10 crc kubenswrapper[4935]: E1201 18:31:10.508429 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.508519 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:10 crc kubenswrapper[4935]: E1201 18:31:10.508663 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.591701 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.591787 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.591806 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.591834 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.591853 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:10Z","lastTransitionTime":"2025-12-01T18:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.696530 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.696603 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.696627 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.696660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.696680 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:10Z","lastTransitionTime":"2025-12-01T18:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.802186 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.802262 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.802282 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.802311 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.802332 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:10Z","lastTransitionTime":"2025-12-01T18:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.906031 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.906100 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.906124 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.906192 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:10 crc kubenswrapper[4935]: I1201 18:31:10.906222 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:10Z","lastTransitionTime":"2025-12-01T18:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.010231 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.010286 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.010303 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.010327 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.010345 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.113749 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.113807 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.113818 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.113837 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.113853 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.216817 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.216855 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.216865 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.216887 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.217083 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.320181 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.320227 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.320242 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.320275 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.320296 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.423511 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.423563 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.423573 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.423591 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.423601 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.507066 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:11 crc kubenswrapper[4935]: E1201 18:31:11.507344 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.508060 4935 scope.go:117] "RemoveContainer" containerID="7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.527008 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.527075 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.527100 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.527131 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.527188 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.630273 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.630349 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.630370 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.630402 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.630421 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.733390 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.733450 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.733471 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.733500 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.733525 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.836691 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.836749 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.836763 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.836786 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.836801 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.940793 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.940849 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.940866 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.940891 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:11 crc kubenswrapper[4935]: I1201 18:31:11.940912 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:11Z","lastTransitionTime":"2025-12-01T18:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.043552 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.043621 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.043641 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.043666 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.043682 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.075143 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/2.log" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.078791 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.079924 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.081114 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.107399 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{
\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\
"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.125137 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.141099 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.146291 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.146332 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.146342 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.146367 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.146381 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.155612 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.171557 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.182800 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.200731 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 
2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.222091 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"2025-12-01T18:30:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94\\\\n2025-12-01T18:30:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94 to /host/opt/cni/bin/\\\\n2025-12-01T18:30:13Z [verbose] multus-daemon started\\\\n2025-12-01T18:30:13Z [verbose] Readiness Indicator file check\\\\n2025-12-01T18:30:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.234193 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.249164 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.249239 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.249255 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.249274 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.249290 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.259005 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname 
/var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:42Z\\\",\\\"message\\\":\\\" for endpointslice openshift-cluster-version/cluster-version-operator-xvdnk as it is not a known egress service\\\\nI1201 18:30:42.356513 6613 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-console/downloads for endpointslice openshift-console/downloads-mt5b4 as it is not a known egress service\\\\nI1201 18:30:42.356602 6613 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.356970 6613 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357195 6613 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357520 6613 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 18:30:42.357833 6613 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.358369 6613 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:42.358405 6613 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:31:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.283747 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.301681 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.317186 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.339802 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f7
7a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"nam
e\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.352465 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.352500 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.352513 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.352532 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.352547 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.354776 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.370069 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.388417 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.405322 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.417598 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"974b1351-9adb-4f0f-88e6-fd0293f343bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://164dfe29cf41ca1bd53f96a771d1f3f816d690a08586e1e7d71272583a16e348\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:12Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.455530 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.455583 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.455594 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.455612 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.455627 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.507802 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.507848 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.507970 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:12 crc kubenswrapper[4935]: E1201 18:31:12.508064 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:12 crc kubenswrapper[4935]: E1201 18:31:12.508282 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:12 crc kubenswrapper[4935]: E1201 18:31:12.508332 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.558526 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.558594 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.558606 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.558628 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.558640 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.661687 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.661756 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.661775 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.661805 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.661825 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.764368 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.764402 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.764410 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.764426 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.764436 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.866689 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.866729 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.866743 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.866764 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.866779 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.969240 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.969300 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.969318 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.969342 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:12 crc kubenswrapper[4935]: I1201 18:31:12.969359 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:12Z","lastTransitionTime":"2025-12-01T18:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.072292 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.072351 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.072369 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.072393 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.072411 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:13Z","lastTransitionTime":"2025-12-01T18:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.086934 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/3.log" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.088045 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/2.log" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.091935 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.093485 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1" exitCode=1 Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.093535 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.093582 4935 scope.go:117] "RemoveContainer" containerID="7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.094220 4935 scope.go:117] "RemoveContainer" containerID="b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1" Dec 01 18:31:13 crc kubenswrapper[4935]: E1201 18:31:13.094423 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.115439 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"974b1351-9adb-4f0f-88e6-fd0293f343bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://164dfe29cf41ca1bd53f96a771d1f3f816d690a08586e1e7d71272583a16e348\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.136024 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.152786 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.170380 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 
18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.176538 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.176595 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.176613 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.176711 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.176741 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:13Z","lastTransitionTime":"2025-12-01T18:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.190255 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.216671 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.239448 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.255691 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"2025-12-01T18:30:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94\\\\n2025-12-01T18:30:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94 to /host/opt/cni/bin/\\\\n2025-12-01T18:30:13Z [verbose] multus-daemon started\\\\n2025-12-01T18:30:13Z [verbose] Readiness Indicator file check\\\\n2025-12-01T18:30:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.268707 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.279861 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.279921 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.279936 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.279960 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.279975 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:13Z","lastTransitionTime":"2025-12-01T18:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.294379 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname 
/var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d069a497837fe1c655a4c67866b893083393b38b399b3430a9bfceb97cea1f7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:42Z\\\",\\\"message\\\":\\\" for endpointslice openshift-cluster-version/cluster-version-operator-xvdnk as it is not a known egress service\\\\nI1201 18:30:42.356513 6613 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-console/downloads for endpointslice openshift-console/downloads-mt5b4 as it is not a known egress service\\\\nI1201 18:30:42.356602 6613 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.356970 6613 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357195 6613 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 18:30:42.357520 6613 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1201 18:30:42.357833 6613 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1201 18:30:42.358369 6613 ovnkube.go:599] Stopped ovnkube\\\\nI1201 18:30:42.358405 6613 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 18:30:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:31:12Z\\\",\\\"message\\\":\\\"for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1201 18:31:12.618934 7000 services_controller.go:443] Built service openshift-cluster-version/cluster-version-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.182\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9099, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1201 18:31:12.618906 7000 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI1201 18:31:12.618956 7000 services_controller.go:444] Built service openshift-cluster-version/cluster-version-operator LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 18:31:12.618935 7000 services_controller.go:451] Built service openshift-authentication-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, 
AffinityT\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:31:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.320563 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d
\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.335389 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.355622 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 
2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.383598 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.383670 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.383689 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.383719 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.383739 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:13Z","lastTransitionTime":"2025-12-01T18:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.383634 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db77
08c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\"
:\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\
\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.402190 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.418770 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.440314 4935 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.461308 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.478609 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:13Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.486622 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.486705 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.486725 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.486750 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.486768 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:13Z","lastTransitionTime":"2025-12-01T18:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.506945 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:13 crc kubenswrapper[4935]: E1201 18:31:13.507063 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.590432 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.590487 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.590504 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.590530 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.590547 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:13Z","lastTransitionTime":"2025-12-01T18:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.693069 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.693132 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.693180 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.693232 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.693257 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:13Z","lastTransitionTime":"2025-12-01T18:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.797833 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.797907 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.797923 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.797945 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.797961 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:13Z","lastTransitionTime":"2025-12-01T18:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.901731 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.901784 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.901797 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.901819 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:13 crc kubenswrapper[4935]: I1201 18:31:13.901831 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:13Z","lastTransitionTime":"2025-12-01T18:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.004296 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.004384 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.004411 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.004442 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.004466 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.101339 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/3.log" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.104438 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.106607 4935 scope.go:117] "RemoveContainer" containerID="b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.106609 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.106725 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.106741 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.106765 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.106780 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: E1201 18:31:14.106945 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.122098 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"974b1351-9adb-4f0f-88e6-fd0293f343bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://164dfe29cf41ca1bd53f96a771d1f3f816d690a08586e1e7d71272583a16e348\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.139771 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.157305 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.177586 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.197064 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.209721 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.209800 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.209821 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.209849 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.209872 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.218264 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.231454 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 
18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.248761 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b19
21caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( 
retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:31:12Z\\\",\\\"message\\\":\\\"for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1201 18:31:12.618934 7000 services_controller.go:443] Built service openshift-cluster-version/cluster-version-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.182\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9099, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1201 18:31:12.618906 
7000 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI1201 18:31:12.618956 7000 services_controller.go:444] Built service openshift-cluster-version/cluster-version-operator LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 18:31:12.618935 7000 services_controller.go:451] Built service openshift-authentication-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityT\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:31:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.267181 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bb
f8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.279399 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.295319 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.310633 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"2025-12-01T18:30:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94\\\\n2025-12-01T18:30:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94 to /host/opt/cni/bin/\\\\n2025-12-01T18:30:13Z [verbose] multus-daemon started\\\\n2025-12-01T18:30:13Z [verbose] Readiness Indicator file check\\\\n2025-12-01T18:30:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.312814 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.312849 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.312874 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.312983 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.312992 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.338418 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.353024 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.368538 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.390115 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.409948 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.419247 4935 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.419284 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.419292 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.419310 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.419324 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.433403 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.452396 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T18:31:14Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.507062 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.507105 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:14 crc kubenswrapper[4935]: E1201 18:31:14.507241 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.507323 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:14 crc kubenswrapper[4935]: E1201 18:31:14.507562 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:14 crc kubenswrapper[4935]: E1201 18:31:14.507694 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.521393 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.521428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.521438 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.521455 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.521466 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.625295 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.625376 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.625399 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.625449 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.625474 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.728674 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.728745 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.728758 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.728783 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.728801 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.831360 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.831416 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.831427 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.831444 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.831458 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.935140 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.935308 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.935332 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.935365 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:14 crc kubenswrapper[4935]: I1201 18:31:14.935387 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:14Z","lastTransitionTime":"2025-12-01T18:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.038188 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.038671 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.038690 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.038722 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.038745 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.141832 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.141906 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.141924 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.141955 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.141980 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.245325 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.245410 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.245433 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.245465 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.245485 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.353455 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.353516 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.353535 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.353561 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.353580 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.457425 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.457600 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.457624 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.457689 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.457709 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.507414 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:15 crc kubenswrapper[4935]: E1201 18:31:15.507762 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.562833 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.562872 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.562884 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.562934 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.562947 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.667047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.667110 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.667127 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.667192 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.667218 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.770753 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.770839 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.770851 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.770874 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.770913 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.873539 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.873584 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.873593 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.873615 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.873626 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.976562 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.976620 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.976639 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.976663 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:15 crc kubenswrapper[4935]: I1201 18:31:15.976680 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:15Z","lastTransitionTime":"2025-12-01T18:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.080559 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.080612 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.080621 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.080638 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.080646 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:16Z","lastTransitionTime":"2025-12-01T18:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.184120 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.184208 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.184230 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.184255 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.184276 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:16Z","lastTransitionTime":"2025-12-01T18:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.288334 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.288394 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.288411 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.288436 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.288456 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:16Z","lastTransitionTime":"2025-12-01T18:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.391173 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.391214 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.391225 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.391243 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.391254 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:16Z","lastTransitionTime":"2025-12-01T18:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.493972 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.494016 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.494030 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.494047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.494059 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:16Z","lastTransitionTime":"2025-12-01T18:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.507537 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.507594 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:16 crc kubenswrapper[4935]: E1201 18:31:16.507684 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.507552 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:16 crc kubenswrapper[4935]: E1201 18:31:16.507903 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:16 crc kubenswrapper[4935]: E1201 18:31:16.507981 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.525871 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.540418 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.556004 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.574814 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\
\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\
\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.585556 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.596507 4935 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.596575 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.596593 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.596618 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.596636 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:16Z","lastTransitionTime":"2025-12-01T18:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.597265 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.608402 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"974b1351-9adb-4f0f-88e6-fd0293f343bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://164dfe29cf41ca1bd53f96a771d1f3f816d690a08586e1e7d71272583a16e348\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.623692 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.639006 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.653411 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f
1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.666214 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.677344 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.688986 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 
18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.698556 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.698603 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.698620 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.698663 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.698676 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:16Z","lastTransitionTime":"2025-12-01T18:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.706434 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.718193 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.729235 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 
2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.741855 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"2025-12-01T18:30:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94\\\\n2025-12-01T18:30:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94 to /host/opt/cni/bin/\\\\n2025-12-01T18:30:13Z [verbose] multus-daemon started\\\\n2025-12-01T18:30:13Z [verbose] Readiness Indicator file check\\\\n2025-12-01T18:30:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.751431 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.773077 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:31:12Z\\\",\\\"message\\\":\\\"for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1201 18:31:12.618934 7000 services_controller.go:443] Built service openshift-cluster-version/cluster-version-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.182\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9099, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1201 18:31:12.618906 7000 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI1201 
18:31:12.618956 7000 services_controller.go:444] Built service openshift-cluster-version/cluster-version-operator LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 18:31:12.618935 7000 services_controller.go:451] Built service openshift-authentication-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityT\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:31:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:16Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.800921 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.800963 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.800980 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.801003 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.801016 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:16Z","lastTransitionTime":"2025-12-01T18:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.903730 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.903775 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.903787 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.903804 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:16 crc kubenswrapper[4935]: I1201 18:31:16.903817 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:16Z","lastTransitionTime":"2025-12-01T18:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.007537 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.007591 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.007601 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.007618 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.007630 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.110755 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.110805 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.110816 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.110835 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.110848 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.214115 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.214168 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.214179 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.214197 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.214208 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.316564 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.316609 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.316619 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.316636 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.316646 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.419299 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.419338 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.419346 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.419360 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.419369 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.507601 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:17 crc kubenswrapper[4935]: E1201 18:31:17.507841 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.525999 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.526760 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.526799 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.526827 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.526846 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.630003 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.630066 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.630078 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.630099 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.630113 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.732379 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.732472 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.732492 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.732522 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.732543 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.835501 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.835562 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.835573 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.835593 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.835608 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.938709 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.938790 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.938812 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.938840 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:17 crc kubenswrapper[4935]: I1201 18:31:17.938858 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:17Z","lastTransitionTime":"2025-12-01T18:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.041719 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.041792 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.041803 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.041827 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.041844 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.144432 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.144501 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.144527 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.144557 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.144581 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.247930 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.247990 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.248008 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.248037 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.248056 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.351503 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.351547 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.351558 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.351574 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.351587 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.455101 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.455140 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.455167 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.455182 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.455191 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.507643 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.507706 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:18 crc kubenswrapper[4935]: E1201 18:31:18.507843 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:18 crc kubenswrapper[4935]: E1201 18:31:18.508050 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.508228 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:18 crc kubenswrapper[4935]: E1201 18:31:18.508458 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.557734 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.557805 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.557819 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.557838 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.557852 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.661073 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.661172 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.661192 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.661219 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.661237 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.764821 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.764887 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.764909 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.764936 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.764954 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.868011 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.868075 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.868092 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.868120 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.868175 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.971387 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.971450 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.971462 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.971480 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:18 crc kubenswrapper[4935]: I1201 18:31:18.971494 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:18Z","lastTransitionTime":"2025-12-01T18:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.074379 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.074447 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.074463 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.074487 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.074504 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.177810 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.177884 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.177904 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.177935 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.177959 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.280790 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.280878 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.280892 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.280915 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.280930 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.383953 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.384029 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.384047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.384079 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.384100 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.487779 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.487835 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.487854 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.487882 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.487902 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.507375 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:19 crc kubenswrapper[4935]: E1201 18:31:19.507564 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.591720 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.591803 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.591830 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.591862 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.591888 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.694970 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.695040 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.695065 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.695092 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.695115 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.798478 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.798532 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.798546 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.798566 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.798579 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.902032 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.902110 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.902128 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.902185 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.902210 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.931004 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.931068 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.931088 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.931116 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.931140 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: E1201 18:31:19.951270 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.956978 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.957043 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.957061 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.957090 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.957112 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: E1201 18:31:19.975603 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.980818 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.980876 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.980889 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.980910 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:19 crc kubenswrapper[4935]: I1201 18:31:19.980926 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:19Z","lastTransitionTime":"2025-12-01T18:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:19 crc kubenswrapper[4935]: E1201 18:31:19.997044 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:19Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.002422 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.002495 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.002552 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.002584 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.002604 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: E1201 18:31:20.017476 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.021938 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.022003 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.022020 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.022044 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.022061 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: E1201 18:31:20.040258 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:20Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:20 crc kubenswrapper[4935]: E1201 18:31:20.040489 4935 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.042356 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.042402 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.042419 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.042448 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.042475 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.145983 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.146046 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.146056 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.146079 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.146092 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.249767 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.249823 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.249832 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.249849 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.249858 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.352911 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.352964 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.352977 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.352993 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.353004 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.456444 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.456497 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.456506 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.456542 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.456555 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.507465 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.507465 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.507691 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:20 crc kubenswrapper[4935]: E1201 18:31:20.507886 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:20 crc kubenswrapper[4935]: E1201 18:31:20.508058 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:20 crc kubenswrapper[4935]: E1201 18:31:20.508276 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.559367 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.559434 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.559447 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.559470 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.559484 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.662219 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.662268 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.662276 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.662293 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.662303 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.765235 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.765286 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.765299 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.765318 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.765328 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.868309 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.868378 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.868395 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.868420 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.868441 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.971352 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.971413 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.971428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.971451 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:20 crc kubenswrapper[4935]: I1201 18:31:20.971464 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:20Z","lastTransitionTime":"2025-12-01T18:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.074705 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.074757 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.074766 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.074785 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.074796 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:21Z","lastTransitionTime":"2025-12-01T18:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.177518 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.177568 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.177579 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.177596 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.177607 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:21Z","lastTransitionTime":"2025-12-01T18:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.280213 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.280251 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.280261 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.280277 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.280287 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:21Z","lastTransitionTime":"2025-12-01T18:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.384193 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.384253 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.384265 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.384285 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.384298 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:21Z","lastTransitionTime":"2025-12-01T18:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.487292 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.487338 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.487350 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.487367 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.487377 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:21Z","lastTransitionTime":"2025-12-01T18:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.507980 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:21 crc kubenswrapper[4935]: E1201 18:31:21.508191 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.590505 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.590544 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.590557 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.590571 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.590581 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:21Z","lastTransitionTime":"2025-12-01T18:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.693078 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.693117 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.693129 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.693165 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.693177 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:21Z","lastTransitionTime":"2025-12-01T18:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.796582 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.796643 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.796661 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.796687 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.796705 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:21Z","lastTransitionTime":"2025-12-01T18:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.900176 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.900249 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.900264 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.900304 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:21 crc kubenswrapper[4935]: I1201 18:31:21.900319 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:21Z","lastTransitionTime":"2025-12-01T18:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.003761 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.003835 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.003853 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.003880 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.003899 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.106812 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.106883 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.106901 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.106930 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.106951 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.209507 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.209549 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.209562 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.209577 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.209590 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.312516 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.312559 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.312569 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.312585 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.312597 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.415095 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.415173 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.415192 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.415218 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.415266 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.507195 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.507237 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:22 crc kubenswrapper[4935]: E1201 18:31:22.507475 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:22 crc kubenswrapper[4935]: E1201 18:31:22.507597 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.507694 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:22 crc kubenswrapper[4935]: E1201 18:31:22.507831 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.518510 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.518551 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.518563 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.518577 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.518587 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.621020 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.621052 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.621062 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.621079 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.621089 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.723746 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.723786 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.723799 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.723817 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.723832 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.827335 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.827393 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.827407 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.827428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.827440 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.929954 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.930024 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.930042 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.930075 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:22 crc kubenswrapper[4935]: I1201 18:31:22.930120 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:22Z","lastTransitionTime":"2025-12-01T18:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.033581 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.033650 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.033660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.033678 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.033704 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.136897 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.136988 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.137006 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.137063 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.137082 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.240819 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.240893 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.240911 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.240936 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.240955 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.344120 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.344219 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.344232 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.344251 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.344263 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.447806 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.447889 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.447908 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.447940 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.447961 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.507460 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:23 crc kubenswrapper[4935]: E1201 18:31:23.508096 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.551396 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.551460 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.551474 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.551502 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.551517 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.654278 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.654354 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.654367 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.654384 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.654397 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.757663 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.757739 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.757758 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.757790 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.757815 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.860298 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.860369 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.860394 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.860427 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.860446 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.964006 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.964085 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.964094 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.964114 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:23 crc kubenswrapper[4935]: I1201 18:31:23.964124 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:23Z","lastTransitionTime":"2025-12-01T18:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.067380 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.067448 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.067465 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.067492 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.067510 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:24Z","lastTransitionTime":"2025-12-01T18:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.170783 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.170846 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.170860 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.170880 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.170893 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:24Z","lastTransitionTime":"2025-12-01T18:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.274647 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.274724 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.274749 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.274788 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.274815 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:24Z","lastTransitionTime":"2025-12-01T18:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.378742 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.379656 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.379693 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.379724 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.379743 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:24Z","lastTransitionTime":"2025-12-01T18:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.483821 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.483887 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.483905 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.483933 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.483953 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:24Z","lastTransitionTime":"2025-12-01T18:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.507732 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.507745 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.507988 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:24 crc kubenswrapper[4935]: E1201 18:31:24.507929 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:24 crc kubenswrapper[4935]: E1201 18:31:24.508208 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:24 crc kubenswrapper[4935]: E1201 18:31:24.508301 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.587812 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.587880 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.587900 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.587925 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.587947 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:24Z","lastTransitionTime":"2025-12-01T18:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.691086 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.691135 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.691180 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.691206 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.691222 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:24Z","lastTransitionTime":"2025-12-01T18:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.794385 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.794463 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.794492 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.794524 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.794547 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:24Z","lastTransitionTime":"2025-12-01T18:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.898918 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.898996 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.899019 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.899047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:24 crc kubenswrapper[4935]: I1201 18:31:24.899066 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:24Z","lastTransitionTime":"2025-12-01T18:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.002423 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.002522 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.002571 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.002596 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.002644 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.104932 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.105027 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.105075 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.105101 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.105119 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.209422 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.209520 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.209568 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.209594 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.209611 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.312967 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.313030 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.313047 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.313075 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.313094 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.416536 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.416633 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.416660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.416694 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.416717 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.508048 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:25 crc kubenswrapper[4935]: E1201 18:31:25.508304 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.519715 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.519766 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.519777 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.519801 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.519814 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.623428 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.623495 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.623513 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.623541 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.623560 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.727444 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.727500 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.727518 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.727539 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.727557 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.830785 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.830864 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.830882 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.830913 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.830934 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.934297 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.934368 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.934385 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.934411 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:25 crc kubenswrapper[4935]: I1201 18:31:25.934431 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:25Z","lastTransitionTime":"2025-12-01T18:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.038648 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.038733 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.038753 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.038788 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.038814 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.142296 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.142364 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.142381 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.142410 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.142433 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.245854 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.245949 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.245971 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.246002 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.246022 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.349501 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.349565 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.349587 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.349864 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.349887 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.453541 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.453660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.453685 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.453710 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.453728 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.507452 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.507672 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:26 crc kubenswrapper[4935]: E1201 18:31:26.507857 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.507882 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:26 crc kubenswrapper[4935]: E1201 18:31:26.507983 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:26 crc kubenswrapper[4935]: E1201 18:31:26.508287 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.528054 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"974b1351-9adb-4f0f-88e6-fd0293f343bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://164dfe29cf41ca1bd53f96a771d1f3f816d690a08586e1e7d71272583a16e348\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbc40228601eda243190937c78a011133f54583ac0996b5c9c5356f07827c343\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 
2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.552238 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.556964 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.557060 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.557090 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.557126 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.557182 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.578466 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.601913 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a3c215c74fa14847793d36520aed83a0d30a9c585c14bd087a17df05c1db8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.621424 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"adc07034-9cd3-4982-bce2-91a840220814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c661efbfb824c37a4b0bd444f18fb35eee390f3a969c9c19f47752be55583b85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f40fc7d525a5c768c9f1f70c8169658bf5809815bd9549e014f2f891d62bdb5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nwq5x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 
18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.643625 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4764b75-296d-4d47-a7b3-ad2f9e3e7452\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf0a871695a752dd531b47130f54e7be02c0444303b8ad5f4e8180dd9943f062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48a57f43573214e6c36ea18fe34bac10c51ff47ae5f87bea04c5095d80b1db7f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88408e7589707a9fbcb954ef3afb09861eeabf54ea6bc2c2b04f73b199e6677f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.659989 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.660070 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.660091 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.660121 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.660140 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.667074 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"985b40e4-7d32-4219-aa6a-a13bc94263fe\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.689042 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13fe58ba0d23aa2e873591bc8da17946c79136d93e4ee7255d40f058214a7e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.713507 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzx4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b45c6-7cf7-420d-afb3-ea00b791af58\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:58Z\\\",\\\"message\\\":\\\"2025-12-01T18:30:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94\\\\n2025-12-01T18:30:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7f21e24f-bb41-4420-a9c4-8c2f3c05ff94 to /host/opt/cni/bin/\\\\n2025-12-01T18:30:13Z [verbose] multus-daemon started\\\\n2025-12-01T18:30:13Z [verbose] Readiness Indicator file check\\\\n2025-12-01T18:30:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4wzb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzx4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.732262 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-26rsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839b5110-76e3-4c2f-80aa-1f2c0485e231\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c8f4e6ec23a23158507b60efb7c329a274413ef7228266130498d8eb4fafa90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z9xk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-26rsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.755822 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f839cb87-9d0b-44af-a9a9-8a6df524aa62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T18:31:12Z\\\",\\\"message\\\":\\\"for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1201 18:31:12.618934 7000 services_controller.go:443] Built service openshift-cluster-version/cluster-version-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.182\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9099, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1201 18:31:12.618906 7000 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nI1201 
18:31:12.618956 7000 services_controller.go:444] Built service openshift-cluster-version/cluster-version-operator LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1201 18:31:12.618935 7000 services_controller.go:451] Built service openshift-authentication-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityT\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T18:31:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmptx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4s97m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.763570 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.763639 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.763662 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.763693 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.763711 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.781376 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2f209b0-1f62-4a3e-944e-90baf7cba34f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4c9a39b7018f09b01c1465deba5d4d979d6cca0af52b415b5d0e8923298cda4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e15cd1488b4a4b9b0709f6c241255953066585c9c35e1f2f9cb22e8e73fb90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://50174802baa55129ba5a2e122bbf42aa7e147244bd6c469e04c0a70509ccb0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://383eb67b52bf0af351f4709a85dcfafb0ee03bbf8a5e921aed17b581245e920c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b31b2f0231499ad07d122735ea116e92c84675024d73c1362b301226e166391f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35b4e99dc7ee6b475aec331cc13e9e7cc1d3a9c252818d7d8af19c6fd50b9f49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"cont
ainerID\\\":\\\"cri-o://03ae352ed11654d120adb605ff09561959a0ae233f677a490781228ce9d417fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://914d0161b0084f92561acfa691ee8998fb2e49773633983baef5899ecdb67561\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.801706 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63484852-2f5b-4d7e-9c2d-533852fa7bba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ede8ff8f972fcd195b9227cb5e3025ec614c5033cf6fc637d0378773204e4692\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e5122e4ebe901ff9d845f36c58a0e236e015b483214910edf99f269463fb560c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://467a0548f97223a17f0554a8912b7c59237cd555ff734fef7686f020f5641095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b92ab646f5300f7a0ea81e9b5d7045dddea6684c6a4222c1be91f0319f4ec5c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:29:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:29:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.820866 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04b86677931e48ee66b67193e2dd78a51cd76efbf6620fd4923d1680f07406d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsmv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zznnp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.841062 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f64dz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b2a9250f-1d40-44de-ace0-dc64bc7bb803\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c55d947745f8c83d2efaeb14e3b1a7df8a5bb200a446534fe16c093ff945e401\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3f
e1f566fa04d2a21a95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3033a9294de6beb9285618f9b2c743205f77a12b8b3fe1f566fa04d2a21a95\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9763d326b4b6b22ab71a0a5d8d461523b8f06245e7c30835e728cbfb87bb7177\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d623447f038523a8f8e2b05382551b54bdbf095891d192c5aa48a84c46274f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\
"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d637ba7226d5cca0859d65e6443551564fb3d6be6d9f1540ca2a6e6d0df9a84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d18e79f7b291ca313408a1afd66164bc987ee574624d462f67a55a74023f3a9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"
terminated\\\":{\\\"containerID\\\":\\\"cri-o://580fead866b1734c8b876d3e7d9fc5b2d0e25293fe0780aa68891b92c2bd41d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T18:30:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T18:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kd257\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f64dz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.866390 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-79z2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7b7c2198-c4bc-4944-948b-7e12d14f0e53\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://701341e443b42dfcd2e4b0deb5268ab4d329050d143ae31f424676ae53ca5100\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg65l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:15Z\\\"}}\" for 
pod \"openshift-image-registry\"/\"node-ca-79z2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.867741 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.867809 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.867818 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.867838 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.867850 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.882318 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3c94c79-953e-4cac-b6c4-e98aeef74928\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8d9nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T18:30:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8jhtj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.901165 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.920119 4935 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T18:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ec814e80525f9ff08c8f2d1d98666f6acebe04b5cf56b81b0b79e1a2e5c7424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8590966558c7e041e7d0038358566e78ba349fdba815eb4677c876a6bd984009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T18:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:26Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.971190 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.971252 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.971272 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.971298 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:26 crc kubenswrapper[4935]: I1201 18:31:26.971322 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:26Z","lastTransitionTime":"2025-12-01T18:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.074440 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.074864 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.074882 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.074910 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.074933 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:27Z","lastTransitionTime":"2025-12-01T18:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.178484 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.178532 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.178552 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.178566 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.178587 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:27Z","lastTransitionTime":"2025-12-01T18:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.282521 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.282676 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.282702 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.282741 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.282772 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:27Z","lastTransitionTime":"2025-12-01T18:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.387088 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.387196 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.387221 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.387261 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.387287 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:27Z","lastTransitionTime":"2025-12-01T18:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.490785 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.490863 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.490882 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.490913 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.490934 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:27Z","lastTransitionTime":"2025-12-01T18:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.507275 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:27 crc kubenswrapper[4935]: E1201 18:31:27.507537 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.593860 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.594009 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.594023 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.594039 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.594051 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:27Z","lastTransitionTime":"2025-12-01T18:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.697520 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.697612 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.697641 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.697676 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.697704 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:27Z","lastTransitionTime":"2025-12-01T18:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.800671 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.800741 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.800757 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.800776 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.800807 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:27Z","lastTransitionTime":"2025-12-01T18:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.904216 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.904281 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.904305 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.904332 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:27 crc kubenswrapper[4935]: I1201 18:31:27.904350 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:27Z","lastTransitionTime":"2025-12-01T18:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.008185 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.008261 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.008273 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.008301 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.008327 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.112647 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.112718 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.112745 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.112777 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.112800 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.216925 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.217008 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.217028 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.217063 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.217084 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.320697 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.320772 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.320781 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.320802 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.320813 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.425127 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.425236 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.425254 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.425280 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.425298 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.508221 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.508334 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.508345 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:28 crc kubenswrapper[4935]: E1201 18:31:28.508406 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:28 crc kubenswrapper[4935]: E1201 18:31:28.508781 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:28 crc kubenswrapper[4935]: E1201 18:31:28.509456 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.510285 4935 scope.go:117] "RemoveContainer" containerID="b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1" Dec 01 18:31:28 crc kubenswrapper[4935]: E1201 18:31:28.510552 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.528702 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.528774 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.528794 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.528826 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.528846 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.632311 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.632398 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.632419 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.632452 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.632475 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.735876 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.735951 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.735968 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.735995 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.736008 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.839409 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.839456 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.839475 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.839501 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.839523 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.942308 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.942376 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.942395 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.942421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:28 crc kubenswrapper[4935]: I1201 18:31:28.942441 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:28Z","lastTransitionTime":"2025-12-01T18:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.046039 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.046106 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.046128 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.046184 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.046205 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.149613 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.149911 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.149931 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.149969 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.149992 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.257904 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.258014 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.258025 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.258045 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.258059 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.361578 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.361641 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.361659 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.361685 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.361705 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.465062 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.465230 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.465245 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.465303 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.465316 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.507536 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:29 crc kubenswrapper[4935]: E1201 18:31:29.507783 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.568991 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.569053 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.569070 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.569098 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.569117 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.672981 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.673117 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.673139 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.673223 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.673242 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.777399 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.777479 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.777496 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.777523 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.777545 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.880800 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.880856 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.880869 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.880889 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.880900 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.984562 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.984632 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.984650 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.984679 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:29 crc kubenswrapper[4935]: I1201 18:31:29.984699 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:29Z","lastTransitionTime":"2025-12-01T18:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.054977 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.055342 4935 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.055469 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs podName:a3c94c79-953e-4cac-b6c4-e98aeef74928 nodeName:}" failed. No retries permitted until 2025-12-01 18:32:34.055435631 +0000 UTC m=+168.077064920 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs") pod "network-metrics-daemon-8jhtj" (UID: "a3c94c79-953e-4cac-b6c4-e98aeef74928") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.088033 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.088091 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.088111 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.088181 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.088215 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.191466 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.191528 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.191553 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.191583 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.191604 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.294923 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.295006 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.295029 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.295060 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.295085 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.387867 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.387938 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.387956 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.387982 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.388001 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.410848 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:30Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.416304 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.416364 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.416375 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.416394 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.416405 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.433854 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:30Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.438894 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.438969 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.438997 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.439036 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.439061 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.454458 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:30Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.459471 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.459503 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.459515 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.459531 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.459549 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.473227 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:30Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.478284 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.478328 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.478345 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.478366 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.478381 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.490326 4935 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T18:31:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9d6c30b1-43b8-4422-8964-f97321d04fb0\\\",\\\"systemUUID\\\":\\\"5ab99546-e564-412e-b1e6-598dab154fb0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T18:31:30Z is after 2025-08-24T17:21:41Z" Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.490616 4935 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.492449 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.492511 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.492537 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.492565 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.492585 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.507811 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.507829 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.507943 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.508200 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.508347 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:30 crc kubenswrapper[4935]: E1201 18:31:30.508600 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.595296 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.595379 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.595406 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.595442 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.595467 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.700882 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.700954 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.700982 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.701019 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.701042 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.804445 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.804535 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.804561 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.804594 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.804623 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.914570 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.914660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.914680 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.914711 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:30 crc kubenswrapper[4935]: I1201 18:31:30.914730 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:30Z","lastTransitionTime":"2025-12-01T18:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.017610 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.017669 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.017691 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.017719 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.017737 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.121348 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.121423 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.121449 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.121483 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.121506 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.224772 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.224843 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.224862 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.224890 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.224909 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.328104 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.328196 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.328221 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.328252 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.328275 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.432524 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.432697 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.432731 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.432807 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.432839 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.507521 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:31 crc kubenswrapper[4935]: E1201 18:31:31.507760 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.536685 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.536762 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.536783 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.536812 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.536835 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.640101 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.640209 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.640235 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.640267 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.640289 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.744205 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.744286 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.744310 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.744343 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.744365 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.847430 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.847517 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.847547 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.847581 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.847606 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.951371 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.951441 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.951454 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.951478 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:31 crc kubenswrapper[4935]: I1201 18:31:31.951493 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:31Z","lastTransitionTime":"2025-12-01T18:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.054408 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.054491 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.054508 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.054534 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.054552 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.157705 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.157755 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.157768 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.157785 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.157798 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.261571 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.261819 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.261965 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.262103 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.262265 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.366054 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.366261 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.366366 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.366499 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.366622 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.468946 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.469000 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.469018 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.469042 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.469059 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.507603 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.507766 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:32 crc kubenswrapper[4935]: E1201 18:31:32.507900 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.507974 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:32 crc kubenswrapper[4935]: E1201 18:31:32.508251 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:32 crc kubenswrapper[4935]: E1201 18:31:32.508459 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.571718 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.571767 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.571778 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.571981 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.571991 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.674793 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.674856 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.674870 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.674892 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.674906 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.777309 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.777372 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.777393 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.777421 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.777445 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.880218 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.880275 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.880288 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.880307 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.880318 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.982671 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.982728 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.982743 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.982793 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:32 crc kubenswrapper[4935]: I1201 18:31:32.982805 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:32Z","lastTransitionTime":"2025-12-01T18:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.086693 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.086791 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.086819 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.086861 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.086886 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:33Z","lastTransitionTime":"2025-12-01T18:31:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.189660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.189730 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.189750 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.189777 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.189796 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:33Z","lastTransitionTime":"2025-12-01T18:31:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.293909 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.293989 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.294018 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.294054 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.294077 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:33Z","lastTransitionTime":"2025-12-01T18:31:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.398069 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.398183 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.398204 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.398236 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.398256 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:33Z","lastTransitionTime":"2025-12-01T18:31:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.501819 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.501893 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.501914 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.501941 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.501961 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:33Z","lastTransitionTime":"2025-12-01T18:31:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.507984 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:33 crc kubenswrapper[4935]: E1201 18:31:33.508234 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.604888 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.604971 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.604989 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.605015 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.605033 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:33Z","lastTransitionTime":"2025-12-01T18:31:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.708664 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.708731 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.708760 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.708798 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.708826 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:33Z","lastTransitionTime":"2025-12-01T18:31:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.811618 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.811643 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.811653 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.811699 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.811708 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:33Z","lastTransitionTime":"2025-12-01T18:31:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.914746 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.914816 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.914835 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.914864 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:33 crc kubenswrapper[4935]: I1201 18:31:33.914885 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:33Z","lastTransitionTime":"2025-12-01T18:31:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.018488 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.018534 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.018554 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.018575 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.018591 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.121397 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.121476 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.121505 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.121539 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.121560 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.225061 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.225129 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.225186 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.225220 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.225247 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.328921 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.328999 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.329023 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.329048 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.329067 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.432885 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.432948 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.432961 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.432985 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.433000 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.507460 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.507558 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.507571 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:34 crc kubenswrapper[4935]: E1201 18:31:34.507712 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:34 crc kubenswrapper[4935]: E1201 18:31:34.507849 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:34 crc kubenswrapper[4935]: E1201 18:31:34.508067 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.536308 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.536371 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.536390 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.536418 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.536440 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.639511 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.639597 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.639620 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.639654 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.639673 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.742825 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.742875 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.742895 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.742920 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.742942 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.846651 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.846750 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.846778 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.846820 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.846844 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.951192 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.951280 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.951305 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.951338 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:34 crc kubenswrapper[4935]: I1201 18:31:34.951362 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:34Z","lastTransitionTime":"2025-12-01T18:31:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.054949 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.055057 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.055083 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.055127 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.055188 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.158722 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.158808 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.158827 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.158857 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.158883 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.262124 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.262187 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.262199 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.262219 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.262232 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.365441 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.365511 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.365534 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.365565 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.365590 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.468842 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.468903 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.468930 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.468963 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.468982 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.507669 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:35 crc kubenswrapper[4935]: E1201 18:31:35.507869 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.582125 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.582196 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.582211 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.582233 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.582249 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.685457 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.685534 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.685551 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.685577 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.685596 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.789568 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.789646 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.789671 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.789703 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.789728 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.894399 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.894489 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.894512 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.894548 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.894572 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.998452 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.998699 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.998727 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.998761 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:35 crc kubenswrapper[4935]: I1201 18:31:35.998788 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:35Z","lastTransitionTime":"2025-12-01T18:31:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.102335 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.102394 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.102412 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.102438 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.102460 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:36Z","lastTransitionTime":"2025-12-01T18:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.205096 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.205194 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.205218 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.205256 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.205284 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:36Z","lastTransitionTime":"2025-12-01T18:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.308222 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.308279 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.308294 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.308315 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.308333 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:36Z","lastTransitionTime":"2025-12-01T18:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.411090 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.411199 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.411228 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.411254 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.411272 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:36Z","lastTransitionTime":"2025-12-01T18:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.507745 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:36 crc kubenswrapper[4935]: E1201 18:31:36.507913 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.507979 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.508010 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:36 crc kubenswrapper[4935]: E1201 18:31:36.508362 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:36 crc kubenswrapper[4935]: E1201 18:31:36.508589 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.516096 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.516205 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.516230 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.516270 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.516293 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:36Z","lastTransitionTime":"2025-12-01T18:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.554647 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-f64dz" podStartSLOduration=85.554607909 podStartE2EDuration="1m25.554607909s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.553984929 +0000 UTC m=+110.575614258" watchObservedRunningTime="2025-12-01 18:31:36.554607909 +0000 UTC m=+110.576237208" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.579754 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-79z2s" podStartSLOduration=85.579722116 podStartE2EDuration="1m25.579722116s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.579410576 +0000 UTC m=+110.601039845" watchObservedRunningTime="2025-12-01 18:31:36.579722116 +0000 UTC m=+110.601351415" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.621832 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.621876 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.621887 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.621904 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.621918 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:36Z","lastTransitionTime":"2025-12-01T18:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.691434 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podStartSLOduration=85.691407044 podStartE2EDuration="1m25.691407044s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.687344448 +0000 UTC m=+110.708973717" watchObservedRunningTime="2025-12-01 18:31:36.691407044 +0000 UTC m=+110.713036313" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.709441 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=33.709420402 podStartE2EDuration="33.709420402s" podCreationTimestamp="2025-12-01 18:31:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.707130321 +0000 UTC m=+110.728759590" watchObservedRunningTime="2025-12-01 18:31:36.709420402 +0000 UTC m=+110.731049681" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.725082 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.725117 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.725128 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.725161 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.725178 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:36Z","lastTransitionTime":"2025-12-01T18:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.747978 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tk459" podStartSLOduration=85.747958194 podStartE2EDuration="1m25.747958194s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.747712447 +0000 UTC m=+110.769341706" watchObservedRunningTime="2025-12-01 18:31:36.747958194 +0000 UTC m=+110.769587463" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.783617 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=92.783588447 podStartE2EDuration="1m32.783588447s" podCreationTimestamp="2025-12-01 18:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.765177208 +0000 UTC m=+110.786806497" watchObservedRunningTime="2025-12-01 18:31:36.783588447 +0000 UTC m=+110.805217716" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.783824 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=92.783819564 podStartE2EDuration="1m32.783819564s" podCreationTimestamp="2025-12-01 18:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.783773953 +0000 UTC m=+110.805403212" watchObservedRunningTime="2025-12-01 18:31:36.783819564 +0000 UTC m=+110.805448833" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.827831 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.827904 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.827921 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.827948 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.827967 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:36Z","lastTransitionTime":"2025-12-01T18:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.828727 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-jzx4x" podStartSLOduration=85.828702364 podStartE2EDuration="1m25.828702364s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.816626621 +0000 UTC m=+110.838255890" watchObservedRunningTime="2025-12-01 18:31:36.828702364 +0000 UTC m=+110.850331633" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.829296 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-26rsx" podStartSLOduration=85.829290662 podStartE2EDuration="1m25.829290662s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.828988383 +0000 UTC m=+110.850617652" watchObservedRunningTime="2025-12-01 18:31:36.829290662 +0000 UTC m=+110.850919931" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.890451 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=92.890419065 podStartE2EDuration="1m32.890419065s" podCreationTimestamp="2025-12-01 18:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.888769234 +0000 UTC m=+110.910398503" watchObservedRunningTime="2025-12-01 18:31:36.890419065 +0000 UTC m=+110.912048344" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.906739 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=61.906707179 podStartE2EDuration="1m1.906707179s" podCreationTimestamp="2025-12-01 18:30:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:36.905961816 +0000 UTC m=+110.927591085" watchObservedRunningTime="2025-12-01 18:31:36.906707179 +0000 UTC m=+110.928336478" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.931247 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.931319 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.931340 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.931371 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:36 crc kubenswrapper[4935]: I1201 18:31:36.931391 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:36Z","lastTransitionTime":"2025-12-01T18:31:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.034856 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.034943 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.034962 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.034992 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.035014 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.137046 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.137111 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.137129 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.137189 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.137215 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.240848 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.240920 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.240938 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.240972 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.240995 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.344016 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.344073 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.344088 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.344112 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.344127 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.447329 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.447672 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.447905 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.448132 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.448395 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.507942 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:37 crc kubenswrapper[4935]: E1201 18:31:37.508272 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.552358 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.552477 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.552497 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.552526 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.552547 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.655824 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.655872 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.655884 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.655907 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.655921 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.759333 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.759380 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.759393 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.759418 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.759432 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.863379 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.863447 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.863465 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.863497 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.863519 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.966551 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.966610 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.966621 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.966649 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:37 crc kubenswrapper[4935]: I1201 18:31:37.966666 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:37Z","lastTransitionTime":"2025-12-01T18:31:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.069985 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.070040 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.070051 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.070069 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.070084 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:38Z","lastTransitionTime":"2025-12-01T18:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.173938 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.174011 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.174030 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.174061 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.174083 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:38Z","lastTransitionTime":"2025-12-01T18:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.277090 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.278651 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.278888 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.279092 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.279339 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:38Z","lastTransitionTime":"2025-12-01T18:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.386644 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.386740 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.386758 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.386783 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.386804 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:38Z","lastTransitionTime":"2025-12-01T18:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.489939 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.490026 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.490052 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.490080 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.490112 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:38Z","lastTransitionTime":"2025-12-01T18:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.507416 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.507439 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.507504 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:38 crc kubenswrapper[4935]: E1201 18:31:38.507648 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:38 crc kubenswrapper[4935]: E1201 18:31:38.507755 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:38 crc kubenswrapper[4935]: E1201 18:31:38.508131 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.593612 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.594187 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.594388 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.594604 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.594837 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:38Z","lastTransitionTime":"2025-12-01T18:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.698257 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.698327 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.698349 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.698373 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.698390 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:38Z","lastTransitionTime":"2025-12-01T18:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.802222 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.802723 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.802937 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.803127 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.803550 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:38Z","lastTransitionTime":"2025-12-01T18:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.906845 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.907218 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.907413 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.907619 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:38 crc kubenswrapper[4935]: I1201 18:31:38.907808 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:38Z","lastTransitionTime":"2025-12-01T18:31:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.011755 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.011829 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.011854 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.011885 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.011912 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.115529 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.115583 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.115602 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.115626 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.115645 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.218876 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.218927 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.218942 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.218967 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.218986 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.322646 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.322732 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.322748 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.322772 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.322788 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.430684 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.430758 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.430779 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.430854 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.430913 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.507673 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:39 crc kubenswrapper[4935]: E1201 18:31:39.507964 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.534897 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.534955 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.534973 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.535001 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.535022 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.638484 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.638544 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.638562 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.638590 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.638608 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.742105 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.742253 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.742276 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.742313 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.742335 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.845868 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.845936 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.845956 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.845986 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.846009 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.950918 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.950998 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.951017 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.951718 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:39 crc kubenswrapper[4935]: I1201 18:31:39.951775 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:39Z","lastTransitionTime":"2025-12-01T18:31:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.055874 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.055940 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.055961 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.055993 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.056018 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:40Z","lastTransitionTime":"2025-12-01T18:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.160191 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.160269 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.160288 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.160315 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.160335 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:40Z","lastTransitionTime":"2025-12-01T18:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.263595 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.263732 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.263785 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.263824 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.263849 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:40Z","lastTransitionTime":"2025-12-01T18:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.366911 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.366966 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.366987 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.367016 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.367037 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:40Z","lastTransitionTime":"2025-12-01T18:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.470942 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.471009 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.471025 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.471048 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.471063 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:40Z","lastTransitionTime":"2025-12-01T18:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.507948 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.508015 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:40 crc kubenswrapper[4935]: E1201 18:31:40.508193 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.508223 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:40 crc kubenswrapper[4935]: E1201 18:31:40.508379 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:40 crc kubenswrapper[4935]: E1201 18:31:40.508609 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.574890 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.574967 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.574996 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.575034 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.575059 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:40Z","lastTransitionTime":"2025-12-01T18:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.678961 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.679039 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.679058 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.679091 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.679115 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:40Z","lastTransitionTime":"2025-12-01T18:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.782572 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.782660 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.782684 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.782720 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.782745 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:40Z","lastTransitionTime":"2025-12-01T18:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.798653 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.798707 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.798728 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.798756 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.798773 4935 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T18:31:40Z","lastTransitionTime":"2025-12-01T18:31:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.876786 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj"] Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.877424 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.881404 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.881638 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.882124 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.882529 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.983217 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.983285 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.983436 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.983506 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:40 crc kubenswrapper[4935]: I1201 18:31:40.983592 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.085250 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.085356 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.085424 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.085460 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.085509 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.085688 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.085701 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.087776 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.095659 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.118220 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6b0f959-ed2b-4c66-8a32-3123eae9e3f8-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8wjsj\" (UID: \"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.206654 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" Dec 01 18:31:41 crc kubenswrapper[4935]: I1201 18:31:41.507435 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:41 crc kubenswrapper[4935]: E1201 18:31:41.507673 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:42 crc kubenswrapper[4935]: I1201 18:31:42.216561 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" event={"ID":"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8","Type":"ContainerStarted","Data":"6e0a3a45abdb5ddc2c496214f9b382237560b6275d88eeec7c451993a976fde9"} Dec 01 18:31:42 crc kubenswrapper[4935]: I1201 18:31:42.216649 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" event={"ID":"a6b0f959-ed2b-4c66-8a32-3123eae9e3f8","Type":"ContainerStarted","Data":"af5a6b66b5925d55860308a9efc7761c044ac781e49a980fb84638221b4c8105"} Dec 01 18:31:42 crc kubenswrapper[4935]: I1201 18:31:42.242750 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8wjsj" podStartSLOduration=91.242725568 podStartE2EDuration="1m31.242725568s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:42.241528431 +0000 UTC m=+116.263157730" watchObservedRunningTime="2025-12-01 18:31:42.242725568 +0000 UTC m=+116.264354867" Dec 01 18:31:42 crc kubenswrapper[4935]: I1201 18:31:42.507589 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:42 crc kubenswrapper[4935]: I1201 18:31:42.507900 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:42 crc kubenswrapper[4935]: I1201 18:31:42.507990 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:42 crc kubenswrapper[4935]: E1201 18:31:42.508259 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:42 crc kubenswrapper[4935]: E1201 18:31:42.508450 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:42 crc kubenswrapper[4935]: E1201 18:31:42.508614 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:43 crc kubenswrapper[4935]: I1201 18:31:43.508013 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:43 crc kubenswrapper[4935]: E1201 18:31:43.508860 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:43 crc kubenswrapper[4935]: I1201 18:31:43.509635 4935 scope.go:117] "RemoveContainer" containerID="b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1" Dec 01 18:31:43 crc kubenswrapper[4935]: E1201 18:31:43.509974 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4s97m_openshift-ovn-kubernetes(f839cb87-9d0b-44af-a9a9-8a6df524aa62)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" Dec 01 18:31:44 crc kubenswrapper[4935]: I1201 18:31:44.507032 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:44 crc kubenswrapper[4935]: I1201 18:31:44.507132 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:44 crc kubenswrapper[4935]: E1201 18:31:44.507276 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:44 crc kubenswrapper[4935]: I1201 18:31:44.507381 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:44 crc kubenswrapper[4935]: E1201 18:31:44.507573 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:44 crc kubenswrapper[4935]: E1201 18:31:44.507661 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:45 crc kubenswrapper[4935]: I1201 18:31:45.229236 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzx4x_3f7b45c6-7cf7-420d-afb3-ea00b791af58/kube-multus/1.log" Dec 01 18:31:45 crc kubenswrapper[4935]: I1201 18:31:45.229916 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzx4x_3f7b45c6-7cf7-420d-afb3-ea00b791af58/kube-multus/0.log" Dec 01 18:31:45 crc kubenswrapper[4935]: I1201 18:31:45.229957 4935 generic.go:334] "Generic (PLEG): container finished" podID="3f7b45c6-7cf7-420d-afb3-ea00b791af58" containerID="5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868" exitCode=1 Dec 01 18:31:45 crc kubenswrapper[4935]: I1201 18:31:45.229992 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzx4x" event={"ID":"3f7b45c6-7cf7-420d-afb3-ea00b791af58","Type":"ContainerDied","Data":"5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868"} Dec 01 18:31:45 crc kubenswrapper[4935]: I1201 18:31:45.230030 4935 scope.go:117] "RemoveContainer" containerID="b39203902238198dcc07c8c5d2a5a3dd5db28b5ade44c2ef08b47063511eaf7d" Dec 01 18:31:45 crc kubenswrapper[4935]: I1201 18:31:45.230532 4935 scope.go:117] "RemoveContainer" containerID="5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868" Dec 01 18:31:45 crc kubenswrapper[4935]: E1201 18:31:45.230720 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-jzx4x_openshift-multus(3f7b45c6-7cf7-420d-afb3-ea00b791af58)\"" pod="openshift-multus/multus-jzx4x" podUID="3f7b45c6-7cf7-420d-afb3-ea00b791af58" Dec 01 18:31:45 crc kubenswrapper[4935]: I1201 
18:31:45.507818 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:45 crc kubenswrapper[4935]: E1201 18:31:45.508097 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:46 crc kubenswrapper[4935]: I1201 18:31:46.237522 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzx4x_3f7b45c6-7cf7-420d-afb3-ea00b791af58/kube-multus/1.log" Dec 01 18:31:46 crc kubenswrapper[4935]: E1201 18:31:46.480045 4935 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 01 18:31:46 crc kubenswrapper[4935]: I1201 18:31:46.507195 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:46 crc kubenswrapper[4935]: I1201 18:31:46.507233 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:46 crc kubenswrapper[4935]: I1201 18:31:46.507446 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:46 crc kubenswrapper[4935]: E1201 18:31:46.509175 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:46 crc kubenswrapper[4935]: E1201 18:31:46.509283 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:46 crc kubenswrapper[4935]: E1201 18:31:46.509425 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:46 crc kubenswrapper[4935]: E1201 18:31:46.614401 4935 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 18:31:47 crc kubenswrapper[4935]: I1201 18:31:47.507383 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:47 crc kubenswrapper[4935]: E1201 18:31:47.508542 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:48 crc kubenswrapper[4935]: I1201 18:31:48.507565 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:48 crc kubenswrapper[4935]: I1201 18:31:48.507645 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:48 crc kubenswrapper[4935]: I1201 18:31:48.507584 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:48 crc kubenswrapper[4935]: E1201 18:31:48.507783 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:48 crc kubenswrapper[4935]: E1201 18:31:48.507891 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:48 crc kubenswrapper[4935]: E1201 18:31:48.508093 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:49 crc kubenswrapper[4935]: I1201 18:31:49.507777 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:49 crc kubenswrapper[4935]: E1201 18:31:49.508045 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:50 crc kubenswrapper[4935]: I1201 18:31:50.508059 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:50 crc kubenswrapper[4935]: I1201 18:31:50.508140 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:50 crc kubenswrapper[4935]: I1201 18:31:50.508159 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:50 crc kubenswrapper[4935]: E1201 18:31:50.508320 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:50 crc kubenswrapper[4935]: E1201 18:31:50.508424 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:50 crc kubenswrapper[4935]: E1201 18:31:50.508583 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:51 crc kubenswrapper[4935]: I1201 18:31:51.507842 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:51 crc kubenswrapper[4935]: E1201 18:31:51.508513 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:51 crc kubenswrapper[4935]: E1201 18:31:51.616211 4935 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 18:31:52 crc kubenswrapper[4935]: I1201 18:31:52.507885 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:52 crc kubenswrapper[4935]: I1201 18:31:52.508007 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:52 crc kubenswrapper[4935]: I1201 18:31:52.507924 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:52 crc kubenswrapper[4935]: E1201 18:31:52.508190 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:52 crc kubenswrapper[4935]: E1201 18:31:52.508330 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:52 crc kubenswrapper[4935]: E1201 18:31:52.508573 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:53 crc kubenswrapper[4935]: I1201 18:31:53.507303 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:53 crc kubenswrapper[4935]: E1201 18:31:53.507542 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:54 crc kubenswrapper[4935]: I1201 18:31:54.507597 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:54 crc kubenswrapper[4935]: I1201 18:31:54.507695 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:54 crc kubenswrapper[4935]: I1201 18:31:54.507663 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:54 crc kubenswrapper[4935]: E1201 18:31:54.507919 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:54 crc kubenswrapper[4935]: E1201 18:31:54.508081 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:54 crc kubenswrapper[4935]: E1201 18:31:54.508203 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:55 crc kubenswrapper[4935]: I1201 18:31:55.506997 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:55 crc kubenswrapper[4935]: E1201 18:31:55.507278 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:56 crc kubenswrapper[4935]: I1201 18:31:56.508246 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:56 crc kubenswrapper[4935]: I1201 18:31:56.508254 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:56 crc kubenswrapper[4935]: I1201 18:31:56.508364 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:56 crc kubenswrapper[4935]: E1201 18:31:56.510477 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:56 crc kubenswrapper[4935]: E1201 18:31:56.510697 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:56 crc kubenswrapper[4935]: E1201 18:31:56.510830 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:56 crc kubenswrapper[4935]: E1201 18:31:56.617665 4935 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 18:31:57 crc kubenswrapper[4935]: I1201 18:31:57.507885 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:57 crc kubenswrapper[4935]: E1201 18:31:57.508114 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:57 crc kubenswrapper[4935]: I1201 18:31:57.509905 4935 scope.go:117] "RemoveContainer" containerID="b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1" Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.288591 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/3.log" Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.292282 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.293260 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerStarted","Data":"d33f5de50fb51090f5f7b3456f4a107e432af52c9e3fc141f21ee67dee73d018"} Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.293715 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.331134 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podStartSLOduration=107.331106967 podStartE2EDuration="1m47.331106967s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:31:58.327869787 +0000 UTC m=+132.349499086" watchObservedRunningTime="2025-12-01 18:31:58.331106967 +0000 UTC m=+132.352736266" Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.507494 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.507552 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.507734 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:31:58 crc kubenswrapper[4935]: E1201 18:31:58.507916 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:31:58 crc kubenswrapper[4935]: E1201 18:31:58.508091 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.508107 4935 scope.go:117] "RemoveContainer" containerID="5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868" Dec 01 18:31:58 crc kubenswrapper[4935]: E1201 18:31:58.508202 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.553334 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-8jhtj"] Dec 01 18:31:58 crc kubenswrapper[4935]: I1201 18:31:58.553482 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:31:58 crc kubenswrapper[4935]: E1201 18:31:58.553600 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:31:59 crc kubenswrapper[4935]: I1201 18:31:59.299538 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzx4x_3f7b45c6-7cf7-420d-afb3-ea00b791af58/kube-multus/1.log" Dec 01 18:31:59 crc kubenswrapper[4935]: I1201 18:31:59.306768 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzx4x" event={"ID":"3f7b45c6-7cf7-420d-afb3-ea00b791af58","Type":"ContainerStarted","Data":"10d7e9da09acf4f48bb842abb0be17f1c105a5670e59294570ce7d7f84b9ed82"} Dec 01 18:32:00 crc kubenswrapper[4935]: I1201 18:32:00.508035 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:32:00 crc kubenswrapper[4935]: I1201 18:32:00.508135 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:32:00 crc kubenswrapper[4935]: I1201 18:32:00.508136 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:32:00 crc kubenswrapper[4935]: I1201 18:32:00.508035 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:32:00 crc kubenswrapper[4935]: E1201 18:32:00.508295 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 18:32:00 crc kubenswrapper[4935]: E1201 18:32:00.508490 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8jhtj" podUID="a3c94c79-953e-4cac-b6c4-e98aeef74928" Dec 01 18:32:00 crc kubenswrapper[4935]: E1201 18:32:00.508615 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 18:32:00 crc kubenswrapper[4935]: E1201 18:32:00.508715 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.507398 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.507448 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.507538 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.507988 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.511509 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.511916 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.511942 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.512635 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.512924 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 01 18:32:02 crc kubenswrapper[4935]: I1201 18:32:02.513967 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.290215 4935 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.331273 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-j75h2"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.333104 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t4jgn"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.333286 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.334303 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.341408 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.341422 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.342717 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.343010 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.343203 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.343991 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.344200 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.344767 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.345039 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.345415 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.345796 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.345967 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.346137 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.346532 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.346594 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.346709 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.346701 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.350861 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-86226"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.351581 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-526sq"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.352042 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.352240 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pgjz6"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.352320 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.352817 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.374969 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-dbvg7"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.390335 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.393333 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.394193 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r9pw7"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.394376 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.394649 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.394781 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.394884 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.394838 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.395218 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.395264 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.395181 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.395270 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.395517 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.395608 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.395714 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.395729 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.395787 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.403628 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmhvv\" (UniqueName: \"kubernetes.io/projected/6881ae5d-31b3-4749-bd1a-db65599d48d3-kube-api-access-nmhvv\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.403690 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-config\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.403719 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-serving-cert\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.403751 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/efebdc19-5ace-480b-8151-51e2ea78b4e8-encryption-config\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.403825 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-trusted-ca-bundle\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.403915 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.403955 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/efebdc19-5ace-480b-8151-51e2ea78b4e8-audit-dir\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.403991 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zhs4\" (UniqueName: \"kubernetes.io/projected/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-kube-api-access-2zhs4\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404019 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7dc60e7d-1c00-4801-bddb-27852f80fb7e-machine-approver-tls\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404124 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404183 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efebdc19-5ace-480b-8151-51e2ea78b4e8-serving-cert\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404242 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-policies\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404261 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srvsj\" (UniqueName: \"kubernetes.io/projected/3d6a824d-7416-48c8-8181-0f0a057d10fb-kube-api-access-srvsj\") pod \"openshift-apiserver-operator-796bbdcf4f-pqvcq\" (UID: \"3d6a824d-7416-48c8-8181-0f0a057d10fb\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404299 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: 
\"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404328 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d6a824d-7416-48c8-8181-0f0a057d10fb-config\") pod \"openshift-apiserver-operator-796bbdcf4f-pqvcq\" (UID: \"3d6a824d-7416-48c8-8181-0f0a057d10fb\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404370 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-oauth-serving-cert\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404473 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbjj4\" (UniqueName: \"kubernetes.io/projected/7dc60e7d-1c00-4801-bddb-27852f80fb7e-kube-api-access-tbjj4\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404504 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-dir\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404536 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404563 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-etcd-serving-ca\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404588 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-serving-cert\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404619 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404753 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-client-ca\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404800 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dc60e7d-1c00-4801-bddb-27852f80fb7e-config\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404875 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f06747c9-3031-4c32-b9c4-56bc9e28f2c0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-86226\" (UID: \"f06747c9-3031-4c32-b9c4-56bc9e28f2c0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404907 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-audit\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404929 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404936 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.404977 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405003 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405037 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llrgx\" (UniqueName: 
\"kubernetes.io/projected/3bf20ee6-67c1-47a6-b47e-5de4d187a495-kube-api-access-llrgx\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405061 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-image-import-ca\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405103 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f06747c9-3031-4c32-b9c4-56bc9e28f2c0-proxy-tls\") pod \"machine-config-controller-84d6567774-86226\" (UID: \"f06747c9-3031-4c32-b9c4-56bc9e28f2c0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405121 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405141 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8wgk\" (UniqueName: \"kubernetes.io/projected/efebdc19-5ace-480b-8151-51e2ea78b4e8-kube-api-access-d8wgk\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405192 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-oauth-config\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405216 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-config\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405248 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/efebdc19-5ace-480b-8151-51e2ea78b4e8-node-pullsecrets\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405272 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-config\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405343 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d6a824d-7416-48c8-8181-0f0a057d10fb-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-pqvcq\" (UID: \"3d6a824d-7416-48c8-8181-0f0a057d10fb\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405384 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz7mk\" (UniqueName: \"kubernetes.io/projected/f06747c9-3031-4c32-b9c4-56bc9e28f2c0-kube-api-access-vz7mk\") pod \"machine-config-controller-84d6567774-86226\" (UID: \"f06747c9-3031-4c32-b9c4-56bc9e28f2c0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405438 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-service-ca\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405486 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405523 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/efebdc19-5ace-480b-8151-51e2ea78b4e8-etcd-client\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405600 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405657 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405697 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405738 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405800 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7dc60e7d-1c00-4801-bddb-27852f80fb7e-auth-proxy-config\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405381 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.406191 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.406215 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.405875 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.406453 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.406575 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.406703 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.406772 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.406875 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.407121 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.407250 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.407432 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.407626 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.406716 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.407818 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.407743 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.408010 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.408175 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.411666 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.412500 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.420002 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-2dvf9"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.420343 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.420372 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7kn22"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.420526 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.420831 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.420893 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.421395 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.421536 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.421424 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.421750 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.422390 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.422541 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.422665 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.422668 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.424806 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.425473 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.425741 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.425887 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.426211 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.427508 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.427715 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.428836 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.432872 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.433714 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.434051 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.434437 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.434893 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.435095 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.436425 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.441344 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.441579 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.441830 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.464156 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.465444 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.465958 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.467985 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.472177 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.472444 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.473108 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.476657 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.480355 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.484101 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.486223 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.487436 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.488074 4935 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.488363 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.488467 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.488789 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.489347 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.489529 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.489567 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.489829 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.490096 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.490178 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.490100 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.490263 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.492213 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.492718 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.493018 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.493070 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.493227 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.494166 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.494356 4935 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.494819 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.495015 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.495053 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.495562 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.495568 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.495772 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.495966 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.496717 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.499101 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-944sj"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.499442 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-p95fx"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.499717 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.500284 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.503892 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.504235 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.504251 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.504465 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lsb2b"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.504738 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.505026 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.505320 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.505495 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.505701 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.505898 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507395 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f06747c9-3031-4c32-b9c4-56bc9e28f2c0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-86226\" (UID: \"f06747c9-3031-4c32-b9c4-56bc9e28f2c0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507441 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-audit\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507474 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-config\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507498 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/65a97abc-c92c-4b07-8922-dace15327fb1-etcd-client\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507527 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507548 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-serving-cert\") pod \"etcd-operator-b45778765-7kn22\" (UID: 
\"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507565 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-images\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507590 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507613 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507632 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-client-ca\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507650 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65a97abc-c92c-4b07-8922-dace15327fb1-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507673 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llrgx\" (UniqueName: \"kubernetes.io/projected/3bf20ee6-67c1-47a6-b47e-5de4d187a495-kube-api-access-llrgx\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507691 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-image-import-ca\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507713 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f06747c9-3031-4c32-b9c4-56bc9e28f2c0-proxy-tls\") pod \"machine-config-controller-84d6567774-86226\" (UID: \"f06747c9-3031-4c32-b9c4-56bc9e28f2c0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:11 crc 
kubenswrapper[4935]: I1201 18:32:11.507732 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7fff11c-c10d-4ac5-b353-75fc431ab510-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5j26w\" (UID: \"a7fff11c-c10d-4ac5-b353-75fc431ab510\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507762 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhhbv\" (UniqueName: \"kubernetes.io/projected/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-kube-api-access-vhhbv\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507787 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8wgk\" (UniqueName: \"kubernetes.io/projected/efebdc19-5ace-480b-8151-51e2ea78b4e8-kube-api-access-d8wgk\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507807 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-oauth-config\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507827 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-etcd-client\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507844 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-serving-cert\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507867 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-config\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507885 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507902 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74qmg\" (UniqueName: \"kubernetes.io/projected/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-kube-api-access-74qmg\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507936 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2145f358-3d68-4239-9267-cfe321b24ec3-proxy-tls\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507959 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/efebdc19-5ace-480b-8151-51e2ea78b4e8-node-pullsecrets\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.507980 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-config\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508000 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/535dd9f1-99c2-430e-82ff-0a148a0331e7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508025 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/65a97abc-c92c-4b07-8922-dace15327fb1-audit-dir\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508051 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-config\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508069 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65a97abc-c92c-4b07-8922-dace15327fb1-serving-cert\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508087 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/535dd9f1-99c2-430e-82ff-0a148a0331e7-serving-cert\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508124 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d6a824d-7416-48c8-8181-0f0a057d10fb-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-pqvcq\" (UID: \"3d6a824d-7416-48c8-8181-0f0a057d10fb\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508177 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz7mk\" (UniqueName: \"kubernetes.io/projected/f06747c9-3031-4c32-b9c4-56bc9e28f2c0-kube-api-access-vz7mk\") pod \"machine-config-controller-84d6567774-86226\" (UID: \"f06747c9-3031-4c32-b9c4-56bc9e28f2c0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508199 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-service-ca\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508213 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f06747c9-3031-4c32-b9c4-56bc9e28f2c0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-86226\" (UID: \"f06747c9-3031-4c32-b9c4-56bc9e28f2c0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508217 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk4ht\" (UniqueName: \"kubernetes.io/projected/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-kube-api-access-wk4ht\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508298 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508322 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/efebdc19-5ace-480b-8151-51e2ea78b4e8-etcd-client\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.508342 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-j75h2\" (UID: 
\"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509305 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/65a97abc-c92c-4b07-8922-dace15327fb1-encryption-config\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509334 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7dc9\" (UniqueName: \"kubernetes.io/projected/65a97abc-c92c-4b07-8922-dace15327fb1-kube-api-access-p7dc9\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509358 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509379 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-etcd-ca\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509399 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98cg7\" (UniqueName: \"kubernetes.io/projected/a7fff11c-c10d-4ac5-b353-75fc431ab510-kube-api-access-98cg7\") pod \"kube-storage-version-migrator-operator-b67b599dd-5j26w\" (UID: \"a7fff11c-c10d-4ac5-b353-75fc431ab510\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509417 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/65a97abc-c92c-4b07-8922-dace15327fb1-audit-policies\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509439 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509462 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: 
\"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509482 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/65a97abc-c92c-4b07-8922-dace15327fb1-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509512 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7dc60e7d-1c00-4801-bddb-27852f80fb7e-auth-proxy-config\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509530 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/94cfb221-c0cb-4979-bab8-ce0124fb0470-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-pc9rt\" (UID: \"94cfb221-c0cb-4979-bab8-ce0124fb0470\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509550 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htwtq\" (UniqueName: \"kubernetes.io/projected/535dd9f1-99c2-430e-82ff-0a148a0331e7-kube-api-access-htwtq\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509572 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-config\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509591 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-serving-cert\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509611 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/efebdc19-5ace-480b-8151-51e2ea78b4e8-encryption-config\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509631 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmhvv\" (UniqueName: \"kubernetes.io/projected/6881ae5d-31b3-4749-bd1a-db65599d48d3-kube-api-access-nmhvv\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: 
I1201 18:32:11.509650 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-trusted-ca-bundle\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509654 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-config\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509708 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncp8j\" (UniqueName: \"kubernetes.io/projected/94cfb221-c0cb-4979-bab8-ce0124fb0470-kube-api-access-ncp8j\") pod \"cluster-samples-operator-665b6dd947-pc9rt\" (UID: \"94cfb221-c0cb-4979-bab8-ce0124fb0470\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509739 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2145f358-3d68-4239-9267-cfe321b24ec3-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509775 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509779 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/efebdc19-5ace-480b-8151-51e2ea78b4e8-node-pullsecrets\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509797 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/efebdc19-5ace-480b-8151-51e2ea78b4e8-audit-dir\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509818 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-etcd-service-ca\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509839 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/d11cf478-7ab4-47c0-aada-7b470f927f7f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-hl8t7\" (UID: \"d11cf478-7ab4-47c0-aada-7b470f927f7f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509859 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfzcz\" (UniqueName: \"kubernetes.io/projected/2145f358-3d68-4239-9267-cfe321b24ec3-kube-api-access-jfzcz\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509881 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zhs4\" (UniqueName: \"kubernetes.io/projected/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-kube-api-access-2zhs4\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509899 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7dc60e7d-1c00-4801-bddb-27852f80fb7e-machine-approver-tls\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509921 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5v2w\" (UniqueName: \"kubernetes.io/projected/d11cf478-7ab4-47c0-aada-7b470f927f7f-kube-api-access-z5v2w\") pod \"openshift-controller-manager-operator-756b6f6bc6-hl8t7\" (UID: \"d11cf478-7ab4-47c0-aada-7b470f927f7f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509943 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509964 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-service-ca\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509974 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-policies\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509997 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/efebdc19-5ace-480b-8151-51e2ea78b4e8-serving-cert\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510017 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d11cf478-7ab4-47c0-aada-7b470f927f7f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-hl8t7\" (UID: \"d11cf478-7ab4-47c0-aada-7b470f927f7f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510035 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/535dd9f1-99c2-430e-82ff-0a148a0331e7-config\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510060 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srvsj\" (UniqueName: \"kubernetes.io/projected/3d6a824d-7416-48c8-8181-0f0a057d10fb-kube-api-access-srvsj\") pod \"openshift-apiserver-operator-796bbdcf4f-pqvcq\" (UID: \"3d6a824d-7416-48c8-8181-0f0a057d10fb\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510079 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510101 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7fff11c-c10d-4ac5-b353-75fc431ab510-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5j26w\" (UID: \"a7fff11c-c10d-4ac5-b353-75fc431ab510\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510118 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/535dd9f1-99c2-430e-82ff-0a148a0331e7-service-ca-bundle\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510137 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d6a824d-7416-48c8-8181-0f0a057d10fb-config\") pod \"openshift-apiserver-operator-796bbdcf4f-pqvcq\" (UID: \"3d6a824d-7416-48c8-8181-0f0a057d10fb\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510176 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-oauth-serving-cert\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510197 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbjj4\" (UniqueName: \"kubernetes.io/projected/7dc60e7d-1c00-4801-bddb-27852f80fb7e-kube-api-access-tbjj4\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510216 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2145f358-3d68-4239-9267-cfe321b24ec3-images\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510237 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-dir\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510258 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510277 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-config\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510295 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qz8g\" (UniqueName: \"kubernetes.io/projected/6d5a0929-fc79-4de8-98b7-d5238c625373-kube-api-access-2qz8g\") pod \"migrator-59844c95c7-jsbfp\" (UID: \"6d5a0929-fc79-4de8-98b7-d5238c625373\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510318 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-etcd-serving-ca\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510340 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-serving-cert\") pod \"console-f9d7485db-dbvg7\" 
(UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510380 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510401 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-client-ca\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510420 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dc60e7d-1c00-4801-bddb-27852f80fb7e-config\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510532 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7dc60e7d-1c00-4801-bddb-27852f80fb7e-auth-proxy-config\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.510915 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dc60e7d-1c00-4801-bddb-27852f80fb7e-config\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.511916 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.511987 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-config\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.512438 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509034 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-audit\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " 
pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.509214 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-config\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.513361 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.515053 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.515332 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efebdc19-5ace-480b-8151-51e2ea78b4e8-serving-cert\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.515640 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d6a824d-7416-48c8-8181-0f0a057d10fb-config\") pod \"openshift-apiserver-operator-796bbdcf4f-pqvcq\" (UID: \"3d6a824d-7416-48c8-8181-0f0a057d10fb\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.516013 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f06747c9-3031-4c32-b9c4-56bc9e28f2c0-proxy-tls\") pod \"machine-config-controller-84d6567774-86226\" (UID: \"f06747c9-3031-4c32-b9c4-56bc9e28f2c0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.516209 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/efebdc19-5ace-480b-8151-51e2ea78b4e8-audit-dir\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.516391 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-oauth-serving-cert\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.516538 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-dir\") pod 
\"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.516625 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.517094 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-image-import-ca\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.517232 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.517417 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.517724 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/efebdc19-5ace-480b-8151-51e2ea78b4e8-etcd-serving-ca\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.517756 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-oauth-config\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.518100 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d6a824d-7416-48c8-8181-0f0a057d10fb-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-pqvcq\" (UID: \"3d6a824d-7416-48c8-8181-0f0a057d10fb\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.518111 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/efebdc19-5ace-480b-8151-51e2ea78b4e8-etcd-client\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.519542 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.519669 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.519692 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-trusted-ca-bundle\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.519943 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-policies\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.520700 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.520697 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-client-ca\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.520939 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.521035 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.521410 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-px5jz"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.521651 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.522284 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-5tzwh"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.522468 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.523316 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89k72"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.523664 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.523785 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.524060 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.524095 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.524407 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.524449 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.525685 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/efebdc19-5ace-480b-8151-51e2ea78b4e8-encryption-config\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.525821 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-serving-cert\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.525826 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.527544 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-pphhc"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.528430 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g5n2c"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.528597 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-pphhc" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.529505 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.530169 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.533442 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.535222 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.536588 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.546667 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.547530 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.548502 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.548678 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.549202 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.549399 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.549700 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.551037 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k4g4f"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.551665 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.551771 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.554172 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.554970 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.559029 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.561034 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.563104 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-serving-cert\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.564405 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7dc60e7d-1c00-4801-bddb-27852f80fb7e-machine-approver-tls\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.573281 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-j75h2"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.576086 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.577055 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.578540 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pgjz6"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.579495 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t4jgn"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.580552 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7kn22"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.581567 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.582605 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r9pw7"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.583434 4935 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-2dvf9"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.584328 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-86226"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.585367 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.586273 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dbvg7"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.587228 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lsb2b"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.588356 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6gzcq"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.589267 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.589342 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-kzzxw"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.590304 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.590818 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.591696 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-5tzwh"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.593118 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.594702 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.595345 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.596061 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-944sj"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.597064 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89k72"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.598664 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.599710 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.600448 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.601362 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.602450 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.603530 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-px5jz"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.604694 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-sstkh"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.605724 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-sstkh" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.606126 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rrv4j"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.607218 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.607252 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k4g4f"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.608629 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.609839 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.610909 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611132 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2145f358-3d68-4239-9267-cfe321b24ec3-images\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611204 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-config\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611237 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qz8g\" (UniqueName: \"kubernetes.io/projected/6d5a0929-fc79-4de8-98b7-d5238c625373-kube-api-access-2qz8g\") pod \"migrator-59844c95c7-jsbfp\" (UID: \"6d5a0929-fc79-4de8-98b7-d5238c625373\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611265 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-config\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611285 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/65a97abc-c92c-4b07-8922-dace15327fb1-etcd-client\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611307 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-serving-cert\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611343 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-images\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611373 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-client-ca\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611394 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65a97abc-c92c-4b07-8922-dace15327fb1-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611431 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ed2bb90-62d7-4254-8f3d-d744b8edce46-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611463 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7fff11c-c10d-4ac5-b353-75fc431ab510-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5j26w\" (UID: \"a7fff11c-c10d-4ac5-b353-75fc431ab510\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611486 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhhbv\" (UniqueName: \"kubernetes.io/projected/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-kube-api-access-vhhbv\") pod 
\"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611521 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-etcd-client\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611538 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-serving-cert\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611557 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611578 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74qmg\" (UniqueName: \"kubernetes.io/projected/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-kube-api-access-74qmg\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611597 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2145f358-3d68-4239-9267-cfe321b24ec3-proxy-tls\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611617 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3ed2bb90-62d7-4254-8f3d-d744b8edce46-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611638 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/535dd9f1-99c2-430e-82ff-0a148a0331e7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611660 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/65a97abc-c92c-4b07-8922-dace15327fb1-audit-dir\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611681 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-config\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611697 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65a97abc-c92c-4b07-8922-dace15327fb1-serving-cert\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611713 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/535dd9f1-99c2-430e-82ff-0a148a0331e7-serving-cert\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611764 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk4ht\" (UniqueName: \"kubernetes.io/projected/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-kube-api-access-wk4ht\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611786 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/65a97abc-c92c-4b07-8922-dace15327fb1-encryption-config\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611804 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7dc9\" (UniqueName: \"kubernetes.io/projected/65a97abc-c92c-4b07-8922-dace15327fb1-kube-api-access-p7dc9\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611824 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szw59\" (UniqueName: \"kubernetes.io/projected/3ed2bb90-62d7-4254-8f3d-d744b8edce46-kube-api-access-szw59\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611845 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-etcd-ca\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611866 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-98cg7\" (UniqueName: \"kubernetes.io/projected/a7fff11c-c10d-4ac5-b353-75fc431ab510-kube-api-access-98cg7\") pod \"kube-storage-version-migrator-operator-b67b599dd-5j26w\" (UID: \"a7fff11c-c10d-4ac5-b353-75fc431ab510\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611887 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/65a97abc-c92c-4b07-8922-dace15327fb1-audit-policies\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611907 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/65a97abc-c92c-4b07-8922-dace15327fb1-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611939 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/94cfb221-c0cb-4979-bab8-ce0124fb0470-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-pc9rt\" (UID: \"94cfb221-c0cb-4979-bab8-ce0124fb0470\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611957 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htwtq\" (UniqueName: \"kubernetes.io/projected/535dd9f1-99c2-430e-82ff-0a148a0331e7-kube-api-access-htwtq\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.611979 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3ed2bb90-62d7-4254-8f3d-d744b8edce46-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612016 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncp8j\" (UniqueName: \"kubernetes.io/projected/94cfb221-c0cb-4979-bab8-ce0124fb0470-kube-api-access-ncp8j\") pod \"cluster-samples-operator-665b6dd947-pc9rt\" (UID: \"94cfb221-c0cb-4979-bab8-ce0124fb0470\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612038 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2145f358-3d68-4239-9267-cfe321b24ec3-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612069 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-etcd-service-ca\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612090 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d11cf478-7ab4-47c0-aada-7b470f927f7f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-hl8t7\" (UID: \"d11cf478-7ab4-47c0-aada-7b470f927f7f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612112 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfzcz\" (UniqueName: \"kubernetes.io/projected/2145f358-3d68-4239-9267-cfe321b24ec3-kube-api-access-jfzcz\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612162 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5v2w\" (UniqueName: \"kubernetes.io/projected/d11cf478-7ab4-47c0-aada-7b470f927f7f-kube-api-access-z5v2w\") pod \"openshift-controller-manager-operator-756b6f6bc6-hl8t7\" (UID: \"d11cf478-7ab4-47c0-aada-7b470f927f7f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612200 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d11cf478-7ab4-47c0-aada-7b470f927f7f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-hl8t7\" (UID: \"d11cf478-7ab4-47c0-aada-7b470f927f7f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612220 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/535dd9f1-99c2-430e-82ff-0a148a0331e7-config\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612245 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/535dd9f1-99c2-430e-82ff-0a148a0331e7-service-ca-bundle\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612263 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7fff11c-c10d-4ac5-b353-75fc431ab510-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5j26w\" (UID: \"a7fff11c-c10d-4ac5-b353-75fc431ab510\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612307 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-images\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612347 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-config\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612391 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612554 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2145f358-3d68-4239-9267-cfe321b24ec3-images\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612821 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.612824 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-config\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.613619 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-etcd-ca\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.613762 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/65a97abc-c92c-4b07-8922-dace15327fb1-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.614082 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/65a97abc-c92c-4b07-8922-dace15327fb1-audit-policies\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.614362 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/65a97abc-c92c-4b07-8922-dace15327fb1-audit-dir\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.614489 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-client-ca\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.614675 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65a97abc-c92c-4b07-8922-dace15327fb1-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.615583 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7fff11c-c10d-4ac5-b353-75fc431ab510-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5j26w\" (UID: \"a7fff11c-c10d-4ac5-b353-75fc431ab510\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.616214 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/65a97abc-c92c-4b07-8922-dace15327fb1-encryption-config\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.616278 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2145f358-3d68-4239-9267-cfe321b24ec3-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.616395 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7fff11c-c10d-4ac5-b353-75fc431ab510-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5j26w\" (UID: \"a7fff11c-c10d-4ac5-b353-75fc431ab510\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.616415 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/65a97abc-c92c-4b07-8922-dace15327fb1-etcd-client\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.616929 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-etcd-service-ca\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.617210 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/94cfb221-c0cb-4979-bab8-ce0124fb0470-samples-operator-tls\") pod 
\"cluster-samples-operator-665b6dd947-pc9rt\" (UID: \"94cfb221-c0cb-4979-bab8-ce0124fb0470\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.617218 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-config\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.617454 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d11cf478-7ab4-47c0-aada-7b470f927f7f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-hl8t7\" (UID: \"d11cf478-7ab4-47c0-aada-7b470f927f7f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.617843 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-etcd-client\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.618108 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/535dd9f1-99c2-430e-82ff-0a148a0331e7-config\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.618337 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-serving-cert\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.618350 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/535dd9f1-99c2-430e-82ff-0a148a0331e7-service-ca-bundle\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.618826 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/535dd9f1-99c2-430e-82ff-0a148a0331e7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.618855 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-serving-cert\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 
18:32:11.619294 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2145f358-3d68-4239-9267-cfe321b24ec3-proxy-tls\") pod \"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.619665 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g5n2c"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.620292 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/535dd9f1-99c2-430e-82ff-0a148a0331e7-serving-cert\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.620387 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d11cf478-7ab4-47c0-aada-7b470f927f7f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-hl8t7\" (UID: \"d11cf478-7ab4-47c0-aada-7b470f927f7f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.620603 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.620788 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65a97abc-c92c-4b07-8922-dace15327fb1-serving-cert\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.621157 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.621203 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.622587 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-sstkh"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.623667 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.624694 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-kzzxw"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.625717 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-pphhc"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.626726 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 
18:32:11.628061 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.629343 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rrv4j"] Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.635658 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.655662 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.683600 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.695630 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.713420 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szw59\" (UniqueName: \"kubernetes.io/projected/3ed2bb90-62d7-4254-8f3d-d744b8edce46-kube-api-access-szw59\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.713515 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3ed2bb90-62d7-4254-8f3d-d744b8edce46-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.713712 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ed2bb90-62d7-4254-8f3d-d744b8edce46-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.713810 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3ed2bb90-62d7-4254-8f3d-d744b8edce46-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.715697 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.755968 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.777374 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.795496 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" 
Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.816581 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.836470 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.855668 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.875644 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.901903 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.905579 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3ed2bb90-62d7-4254-8f3d-d744b8edce46-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.915960 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.936792 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.948351 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ed2bb90-62d7-4254-8f3d-d744b8edce46-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.955262 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.976774 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 18:32:11 crc kubenswrapper[4935]: I1201 18:32:11.995802 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.016878 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.038881 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.055865 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.076884 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.097407 4935 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.116274 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.136793 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.156372 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.177917 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.203721 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.235850 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz7mk\" (UniqueName: \"kubernetes.io/projected/f06747c9-3031-4c32-b9c4-56bc9e28f2c0-kube-api-access-vz7mk\") pod \"machine-config-controller-84d6567774-86226\" (UID: \"f06747c9-3031-4c32-b9c4-56bc9e28f2c0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.254686 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srvsj\" (UniqueName: \"kubernetes.io/projected/3d6a824d-7416-48c8-8181-0f0a057d10fb-kube-api-access-srvsj\") pod \"openshift-apiserver-operator-796bbdcf4f-pqvcq\" (UID: \"3d6a824d-7416-48c8-8181-0f0a057d10fb\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.274552 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llrgx\" (UniqueName: \"kubernetes.io/projected/3bf20ee6-67c1-47a6-b47e-5de4d187a495-kube-api-access-llrgx\") pod \"oauth-openshift-558db77b4-pgjz6\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.303210 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbjj4\" (UniqueName: \"kubernetes.io/projected/7dc60e7d-1c00-4801-bddb-27852f80fb7e-kube-api-access-tbjj4\") pod \"machine-approver-56656f9798-526sq\" (UID: \"7dc60e7d-1c00-4801-bddb-27852f80fb7e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.323901 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8wgk\" (UniqueName: \"kubernetes.io/projected/efebdc19-5ace-480b-8151-51e2ea78b4e8-kube-api-access-d8wgk\") pod \"apiserver-76f77b778f-j75h2\" (UID: \"efebdc19-5ace-480b-8151-51e2ea78b4e8\") " pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.324075 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.332040 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmhvv\" (UniqueName: \"kubernetes.io/projected/6881ae5d-31b3-4749-bd1a-db65599d48d3-kube-api-access-nmhvv\") pod \"console-f9d7485db-dbvg7\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.339273 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.357610 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.364730 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zhs4\" (UniqueName: \"kubernetes.io/projected/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-kube-api-access-2zhs4\") pod \"controller-manager-879f6c89f-t4jgn\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.367475 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.375963 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 18:32:12 crc kubenswrapper[4935]: W1201 18:32:12.396080 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dc60e7d_1c00_4801_bddb_27852f80fb7e.slice/crio-ad8d2f70be01711911577a372ddb5fafd4cd66db28e8cc33ae1339069d087ce1 WatchSource:0}: Error finding container ad8d2f70be01711911577a372ddb5fafd4cd66db28e8cc33ae1339069d087ce1: Status 404 returned error can't find the container with id ad8d2f70be01711911577a372ddb5fafd4cd66db28e8cc33ae1339069d087ce1 Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.396674 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.407635 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.416322 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.437250 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.456983 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.460632 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.476924 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.496180 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.515658 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.530868 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.531095 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.531160 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.531247 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.531288 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:32:12 crc kubenswrapper[4935]: E1201 18:32:12.532934 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:34:14.532900996 +0000 UTC m=+268.554530255 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.534227 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.536453 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.536469 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.536515 4935 request.go:700] Waited for 1.012509631s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-multus/secrets?fieldSelector=metadata.name%3Dmultus-admission-controller-secret&limit=500&resourceVersion=0 Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.537815 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.540027 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.555689 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.560208 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-86226"] Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.575463 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.591174 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.596655 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq"] Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.598659 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.610125 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:12 crc kubenswrapper[4935]: W1201 18:32:12.613949 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d6a824d_7416_48c8_8181_0f0a057d10fb.slice/crio-7905167b8ae9468fa8427a2a5b18c0dca9115972f369e044e74fb3ecf2da093d WatchSource:0}: Error finding container 7905167b8ae9468fa8427a2a5b18c0dca9115972f369e044e74fb3ecf2da093d: Status 404 returned error can't find the container with id 7905167b8ae9468fa8427a2a5b18c0dca9115972f369e044e74fb3ecf2da093d Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.615103 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.637853 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.639044 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pgjz6"] Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.656723 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 01 18:32:12 crc kubenswrapper[4935]: W1201 18:32:12.659325 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3bf20ee6_67c1_47a6_b47e_5de4d187a495.slice/crio-ced12cf8269628e08f6885c7456be2df54f720c15be1a6b7fb822e44ca002b37 WatchSource:0}: Error finding container ced12cf8269628e08f6885c7456be2df54f720c15be1a6b7fb822e44ca002b37: Status 404 returned error can't find the container with id ced12cf8269628e08f6885c7456be2df54f720c15be1a6b7fb822e44ca002b37 Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.676466 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.698251 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.717794 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dbvg7"] Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.718638 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.728368 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.740728 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.740814 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.756684 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.763578 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.776421 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.798283 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.818950 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.836729 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.845586 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t4jgn"] Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.856463 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.877297 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.882478 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-j75h2"] Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.896567 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.915796 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.936002 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.956929 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.975893 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 18:32:12 crc kubenswrapper[4935]: I1201 18:32:12.995540 4935 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 01 18:32:13 crc kubenswrapper[4935]: W1201 18:32:13.015771 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebcaf8e6_60d5_43ee_993d_9ed20564e23f.slice/crio-2cb76fb5db3a2653d6f0aa3ae751405eee398cb54f9bfcff21444fb7f2d94165 WatchSource:0}: Error finding container 2cb76fb5db3a2653d6f0aa3ae751405eee398cb54f9bfcff21444fb7f2d94165: Status 404 returned error can't find the container with id 2cb76fb5db3a2653d6f0aa3ae751405eee398cb54f9bfcff21444fb7f2d94165 Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.016963 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.037090 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.058439 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.077358 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.097007 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.116900 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.138129 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.167615 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.179261 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.195575 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.216757 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.237298 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.256562 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.277021 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.296803 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.316237 4935 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 01 18:32:13 crc kubenswrapper[4935]: W1201 18:32:13.321467 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-fecea9dc77aa0c40a997a0f21d494ae1e0feefd24a5a9655901781b3bf142a52 WatchSource:0}: Error finding container fecea9dc77aa0c40a997a0f21d494ae1e0feefd24a5a9655901781b3bf142a52: Status 404 returned error can't find the container with id fecea9dc77aa0c40a997a0f21d494ae1e0feefd24a5a9655901781b3bf142a52 Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.337322 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 01 18:32:13 crc kubenswrapper[4935]: W1201 18:32:13.346056 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-9f3595b1aae4b17d4f57f48bf2cb6e26fd6ebb899bae94eb944478bf218925a7 WatchSource:0}: Error finding container 9f3595b1aae4b17d4f57f48bf2cb6e26fd6ebb899bae94eb944478bf218925a7: Status 404 returned error can't find the container with id 9f3595b1aae4b17d4f57f48bf2cb6e26fd6ebb899bae94eb944478bf218925a7 Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.356230 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.396794 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.400084 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" event={"ID":"f06747c9-3031-4c32-b9c4-56bc9e28f2c0","Type":"ContainerStarted","Data":"bdc0b358a3116f875d17b818286eecb63092015a9f41447ede3a5f11836c8bbc"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.400130 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" event={"ID":"f06747c9-3031-4c32-b9c4-56bc9e28f2c0","Type":"ContainerStarted","Data":"ac6f97a3a34eeb54109c2b09ab5522a35f181171691bc8f3cbfa4128c4c52640"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.400183 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" event={"ID":"f06747c9-3031-4c32-b9c4-56bc9e28f2c0","Type":"ContainerStarted","Data":"458f8147e52046b76ec140e8b4f7b674309f57ef0455935d5b5493205058446a"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.400832 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"fecea9dc77aa0c40a997a0f21d494ae1e0feefd24a5a9655901781b3bf142a52"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.401950 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"9f3595b1aae4b17d4f57f48bf2cb6e26fd6ebb899bae94eb944478bf218925a7"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.403208 4935 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" event={"ID":"ebcaf8e6-60d5-43ee-993d-9ed20564e23f","Type":"ContainerStarted","Data":"4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.403232 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" event={"ID":"ebcaf8e6-60d5-43ee-993d-9ed20564e23f","Type":"ContainerStarted","Data":"2cb76fb5db3a2653d6f0aa3ae751405eee398cb54f9bfcff21444fb7f2d94165"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.404198 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.406980 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" event={"ID":"3bf20ee6-67c1-47a6-b47e-5de4d187a495","Type":"ContainerStarted","Data":"f82783ec598686f5a24a773502e1ae1f33fbce0d3179cd5b78724ab43eb16ca3"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.407135 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" event={"ID":"3bf20ee6-67c1-47a6-b47e-5de4d187a495","Type":"ContainerStarted","Data":"ced12cf8269628e08f6885c7456be2df54f720c15be1a6b7fb822e44ca002b37"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.407243 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.407051 4935 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-t4jgn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.407422 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" podUID="ebcaf8e6-60d5-43ee-993d-9ed20564e23f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.410986 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6a5c0dce19fafb8367c0ccd39770598b87f69a851b778f2d2f8f5ba91976be4b"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.411193 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"fb1255ef86d6837fee5b94ccb118bc4e2c67856784f86c13414cbdf77b855c1a"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.411130 4935 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-pgjz6 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" start-of-body= Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.411378 4935 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.411589 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.412949 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" event={"ID":"3d6a824d-7416-48c8-8181-0f0a057d10fb","Type":"ContainerStarted","Data":"ccf148e71e0b4b121314a91dd93193a5834f9451da8918279fec0b40d89a912d"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.412977 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" event={"ID":"3d6a824d-7416-48c8-8181-0f0a057d10fb","Type":"ContainerStarted","Data":"7905167b8ae9468fa8427a2a5b18c0dca9115972f369e044e74fb3ecf2da093d"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.415564 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.415701 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" event={"ID":"efebdc19-5ace-480b-8151-51e2ea78b4e8","Type":"ContainerStarted","Data":"914f0220e96593d649e5ebb6e7b411e983957a08c527741f0055415fdc4f1883"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.419593 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dbvg7" event={"ID":"6881ae5d-31b3-4749-bd1a-db65599d48d3","Type":"ContainerStarted","Data":"b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.419809 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dbvg7" event={"ID":"6881ae5d-31b3-4749-bd1a-db65599d48d3","Type":"ContainerStarted","Data":"a2d8b817d83f17d308a90a14a31de72475bdd9d2ecde24019ff758253bf0b222"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.422694 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" event={"ID":"7dc60e7d-1c00-4801-bddb-27852f80fb7e","Type":"ContainerStarted","Data":"a81f18e8ff8c32ee89593e22b91796a4b38f7c28b97aa66e7c583f6479bab916"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.422772 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" event={"ID":"7dc60e7d-1c00-4801-bddb-27852f80fb7e","Type":"ContainerStarted","Data":"ad8d2f70be01711911577a372ddb5fafd4cd66db28e8cc33ae1339069d087ce1"} Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.436395 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.456632 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.476408 4935 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" 
Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.497740 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.515495 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.554121 4935 request.go:700] Waited for 1.940932882s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa/token Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.564343 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qz8g\" (UniqueName: \"kubernetes.io/projected/6d5a0929-fc79-4de8-98b7-d5238c625373-kube-api-access-2qz8g\") pod \"migrator-59844c95c7-jsbfp\" (UID: \"6d5a0929-fc79-4de8-98b7-d5238c625373\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.574177 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7dc9\" (UniqueName: \"kubernetes.io/projected/65a97abc-c92c-4b07-8922-dace15327fb1-kube-api-access-p7dc9\") pod \"apiserver-7bbb656c7d-tnkbz\" (UID: \"65a97abc-c92c-4b07-8922-dace15327fb1\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.597750 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98cg7\" (UniqueName: \"kubernetes.io/projected/a7fff11c-c10d-4ac5-b353-75fc431ab510-kube-api-access-98cg7\") pod \"kube-storage-version-migrator-operator-b67b599dd-5j26w\" (UID: \"a7fff11c-c10d-4ac5-b353-75fc431ab510\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.624012 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74qmg\" (UniqueName: \"kubernetes.io/projected/77a79ecb-4a46-43f2-9187-7cd6fc3dc641-kube-api-access-74qmg\") pod \"machine-api-operator-5694c8668f-r9pw7\" (UID: \"77a79ecb-4a46-43f2-9187-7cd6fc3dc641\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.635819 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhhbv\" (UniqueName: \"kubernetes.io/projected/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-kube-api-access-vhhbv\") pod \"route-controller-manager-6576b87f9c-4dmcv\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.651464 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htwtq\" (UniqueName: \"kubernetes.io/projected/535dd9f1-99c2-430e-82ff-0a148a0331e7-kube-api-access-htwtq\") pod \"authentication-operator-69f744f599-2dvf9\" (UID: \"535dd9f1-99c2-430e-82ff-0a148a0331e7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.669800 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfzcz\" (UniqueName: \"kubernetes.io/projected/2145f358-3d68-4239-9267-cfe321b24ec3-kube-api-access-jfzcz\") pod 
\"machine-config-operator-74547568cd-5wx92\" (UID: \"2145f358-3d68-4239-9267-cfe321b24ec3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.690173 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.698681 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.705025 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncp8j\" (UniqueName: \"kubernetes.io/projected/94cfb221-c0cb-4979-bab8-ce0124fb0470-kube-api-access-ncp8j\") pod \"cluster-samples-operator-665b6dd947-pc9rt\" (UID: \"94cfb221-c0cb-4979-bab8-ce0124fb0470\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.725130 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5v2w\" (UniqueName: \"kubernetes.io/projected/d11cf478-7ab4-47c0-aada-7b470f927f7f-kube-api-access-z5v2w\") pod \"openshift-controller-manager-operator-756b6f6bc6-hl8t7\" (UID: \"d11cf478-7ab4-47c0-aada-7b470f927f7f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.742372 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk4ht\" (UniqueName: \"kubernetes.io/projected/2b90e933-e7fe-4bfb-a2cc-47de5c67d631-kube-api-access-wk4ht\") pod \"etcd-operator-b45778765-7kn22\" (UID: \"2b90e933-e7fe-4bfb-a2cc-47de5c67d631\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.746725 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.753335 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szw59\" (UniqueName: \"kubernetes.io/projected/3ed2bb90-62d7-4254-8f3d-d744b8edce46-kube-api-access-szw59\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.754921 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.765102 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.773816 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3ed2bb90-62d7-4254-8f3d-d744b8edce46-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nnx69\" (UID: \"3ed2bb90-62d7-4254-8f3d-d744b8edce46\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.782617 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.791825 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.799238 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.827967 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851218 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ca0a59-24a4-459e-a0d3-891d6ac90ed7-config\") pod \"kube-apiserver-operator-766d6c64bb-w9jp2\" (UID: \"72ca0a59-24a4-459e-a0d3-891d6ac90ed7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851278 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72ca0a59-24a4-459e-a0d3-891d6ac90ed7-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-w9jp2\" (UID: \"72ca0a59-24a4-459e-a0d3-891d6ac90ed7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851342 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdhx6\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-kube-api-access-qdhx6\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851373 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8bac674-5c61-4782-9f74-6374a430e7fc-config-volume\") pod \"collect-profiles-29410230-xt77b\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851401 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwqhh\" (UniqueName: \"kubernetes.io/projected/ca0af18b-162c-47f2-aa52-6bcd54a87a80-kube-api-access-nwqhh\") pod \"service-ca-operator-777779d784-x4vnj\" (UID: 
\"ca0af18b-162c-47f2-aa52-6bcd54a87a80\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851441 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2m9f\" (UniqueName: \"kubernetes.io/projected/625e7064-21f1-491d-bbc2-1d2b2faaa977-kube-api-access-k2m9f\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851504 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b337cc76-0681-41e9-9cdb-7c660ee29b84-serving-cert\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851543 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/38933f63-180c-4831-858b-2efc49cf634a-srv-cert\") pod \"catalog-operator-68c6474976-crm6z\" (UID: \"38933f63-180c-4831-858b-2efc49cf634a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851566 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72ca0a59-24a4-459e-a0d3-891d6ac90ed7-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-w9jp2\" (UID: \"72ca0a59-24a4-459e-a0d3-891d6ac90ed7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851620 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22-metrics-tls\") pod \"dns-operator-744455d44c-lsb2b\" (UID: \"5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22\") " pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851755 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e7437d18-a515-4690-8ccf-65b8540426a1-certs\") pod \"machine-config-server-6gzcq\" (UID: \"e7437d18-a515-4690-8ccf-65b8540426a1\") " pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851788 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/38933f63-180c-4831-858b-2efc49cf634a-profile-collector-cert\") pod \"catalog-operator-68c6474976-crm6z\" (UID: \"38933f63-180c-4831-858b-2efc49cf634a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851812 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b337cc76-0681-41e9-9cdb-7c660ee29b84-trusted-ca\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " 
pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851847 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p7vg\" (UniqueName: \"kubernetes.io/projected/71041089-3004-4e0b-990d-c6e471b38ea6-kube-api-access-6p7vg\") pod \"dns-default-kzzxw\" (UID: \"71041089-3004-4e0b-990d-c6e471b38ea6\") " pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851887 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18dc429c-7515-47a0-b008-b60a631a2723-trusted-ca\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851937 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-metrics-certs\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851953 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/625e7064-21f1-491d-bbc2-1d2b2faaa977-apiservice-cert\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.851987 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k4g4f\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852006 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z94tv\" (UniqueName: \"kubernetes.io/projected/032f1651-2326-461b-897e-35303c17f32c-kube-api-access-z94tv\") pod \"multus-admission-controller-857f4d67dd-5tzwh\" (UID: \"032f1651-2326-461b-897e-35303c17f32c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852022 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86vfr\" (UniqueName: \"kubernetes.io/projected/b123b9f1-7d6b-496c-87c2-7790b027abd6-kube-api-access-86vfr\") pod \"marketplace-operator-79b997595-k4g4f\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852038 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/625e7064-21f1-491d-bbc2-1d2b2faaa977-webhook-cert\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" 
Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852077 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7jdz\" (UniqueName: \"kubernetes.io/projected/38933f63-180c-4831-858b-2efc49cf634a-kube-api-access-q7jdz\") pod \"catalog-operator-68c6474976-crm6z\" (UID: \"38933f63-180c-4831-858b-2efc49cf634a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852098 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpgsc\" (UniqueName: \"kubernetes.io/projected/9f035d02-e865-4fc5-a759-372db1d4e910-kube-api-access-mpgsc\") pod \"olm-operator-6b444d44fb-48czs\" (UID: \"9f035d02-e865-4fc5-a759-372db1d4e910\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852184 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k4g4f\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852601 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852639 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46sgh\" (UniqueName: \"kubernetes.io/projected/6ccc1894-d6b6-454c-ab68-45e1ffbc9124-kube-api-access-46sgh\") pod \"openshift-config-operator-7777fb866f-px5jz\" (UID: \"6ccc1894-d6b6-454c-ab68-45e1ffbc9124\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852662 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shqdx\" (UniqueName: \"kubernetes.io/projected/d8bac674-5c61-4782-9f74-6374a430e7fc-kube-api-access-shqdx\") pod \"collect-profiles-29410230-xt77b\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.852684 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4f81095d-3084-427f-8f0e-bdd180180c31-installation-pull-secrets\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.855704 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-m95dz\" (UID: 
\"4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.855794 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-registry-certificates\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.855894 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ccc1894-d6b6-454c-ab68-45e1ffbc9124-serving-cert\") pod \"openshift-config-operator-7777fb866f-px5jz\" (UID: \"6ccc1894-d6b6-454c-ab68-45e1ffbc9124\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:13 crc kubenswrapper[4935]: E1201 18:32:13.858914 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.358890908 +0000 UTC m=+148.380520167 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873551 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9f035d02-e865-4fc5-a759-372db1d4e910-profile-collector-cert\") pod \"olm-operator-6b444d44fb-48czs\" (UID: \"9f035d02-e865-4fc5-a759-372db1d4e910\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873641 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/40102f05-30f8-4552-8db6-140eced8121a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-wvsgh\" (UID: \"40102f05-30f8-4552-8db6-140eced8121a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873660 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/335915be-d5ad-4beb-929c-b41d0b7c4601-signing-key\") pod \"service-ca-9c57cc56f-g5n2c\" (UID: \"335915be-d5ad-4beb-929c-b41d0b7c4601\") " pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873685 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40102f05-30f8-4552-8db6-140eced8121a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-wvsgh\" (UID: \"40102f05-30f8-4552-8db6-140eced8121a\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873708 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/032f1651-2326-461b-897e-35303c17f32c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-5tzwh\" (UID: \"032f1651-2326-461b-897e-35303c17f32c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873840 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/18dc429c-7515-47a0-b008-b60a631a2723-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873886 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f502d82d-fe0c-40c0-aeba-1b50934fd13a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9st46\" (UID: \"f502d82d-fe0c-40c0-aeba-1b50934fd13a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873912 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-default-certificate\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873946 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-registry-tls\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.873975 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40102f05-30f8-4552-8db6-140eced8121a-config\") pod \"kube-controller-manager-operator-78b949d7b-wvsgh\" (UID: \"40102f05-30f8-4552-8db6-140eced8121a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874006 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/18dc429c-7515-47a0-b008-b60a631a2723-metrics-tls\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874034 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71041089-3004-4e0b-990d-c6e471b38ea6-config-volume\") pod \"dns-default-kzzxw\" (UID: \"71041089-3004-4e0b-990d-c6e471b38ea6\") " 
pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874108 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q9jw\" (UniqueName: \"kubernetes.io/projected/4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a-kube-api-access-2q9jw\") pod \"package-server-manager-789f6589d5-m95dz\" (UID: \"4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874434 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv5hr\" (UniqueName: \"kubernetes.io/projected/5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22-kube-api-access-bv5hr\") pod \"dns-operator-744455d44c-lsb2b\" (UID: \"5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22\") " pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874461 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca0af18b-162c-47f2-aa52-6bcd54a87a80-serving-cert\") pod \"service-ca-operator-777779d784-x4vnj\" (UID: \"ca0af18b-162c-47f2-aa52-6bcd54a87a80\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874491 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48vjt\" (UniqueName: \"kubernetes.io/projected/335915be-d5ad-4beb-929c-b41d0b7c4601-kube-api-access-48vjt\") pod \"service-ca-9c57cc56f-g5n2c\" (UID: \"335915be-d5ad-4beb-929c-b41d0b7c4601\") " pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874533 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t6rx\" (UniqueName: \"kubernetes.io/projected/18dc429c-7515-47a0-b008-b60a631a2723-kube-api-access-6t6rx\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874554 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ddd2fe2c-4083-4793-81d8-20d7fc05fe3d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-29f7l\" (UID: \"ddd2fe2c-4083-4793-81d8-20d7fc05fe3d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874579 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-service-ca-bundle\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874613 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9f035d02-e865-4fc5-a759-372db1d4e910-srv-cert\") pod \"olm-operator-6b444d44fb-48czs\" (UID: \"9f035d02-e865-4fc5-a759-372db1d4e910\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874634 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4f81095d-3084-427f-8f0e-bdd180180c31-ca-trust-extracted\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874654 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6ccc1894-d6b6-454c-ab68-45e1ffbc9124-available-featuregates\") pod \"openshift-config-operator-7777fb866f-px5jz\" (UID: \"6ccc1894-d6b6-454c-ab68-45e1ffbc9124\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.874673 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td4sg\" (UniqueName: \"kubernetes.io/projected/4c9a288d-97d2-4f8d-8e74-fb4913ee6627-kube-api-access-td4sg\") pod \"downloads-7954f5f757-pphhc\" (UID: \"4c9a288d-97d2-4f8d-8e74-fb4913ee6627\") " pod="openshift-console/downloads-7954f5f757-pphhc" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.877365 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b337cc76-0681-41e9-9cdb-7c660ee29b84-config\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.877699 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t645p\" (UniqueName: \"kubernetes.io/projected/e7437d18-a515-4690-8ccf-65b8540426a1-kube-api-access-t645p\") pod \"machine-config-server-6gzcq\" (UID: \"e7437d18-a515-4690-8ccf-65b8540426a1\") " pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.877835 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8bac674-5c61-4782-9f74-6374a430e7fc-secret-volume\") pod \"collect-profiles-29410230-xt77b\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.877893 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e7437d18-a515-4690-8ccf-65b8540426a1-node-bootstrap-token\") pod \"machine-config-server-6gzcq\" (UID: \"e7437d18-a515-4690-8ccf-65b8540426a1\") " pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.877925 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/625e7064-21f1-491d-bbc2-1d2b2faaa977-tmpfs\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.877957 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-bound-sa-token\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.878134 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca0af18b-162c-47f2-aa52-6bcd54a87a80-config\") pod \"service-ca-operator-777779d784-x4vnj\" (UID: \"ca0af18b-162c-47f2-aa52-6bcd54a87a80\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.879560 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/71041089-3004-4e0b-990d-c6e471b38ea6-metrics-tls\") pod \"dns-default-kzzxw\" (UID: \"71041089-3004-4e0b-990d-c6e471b38ea6\") " pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.879607 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-trusted-ca\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.879860 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/335915be-d5ad-4beb-929c-b41d0b7c4601-signing-cabundle\") pod \"service-ca-9c57cc56f-g5n2c\" (UID: \"335915be-d5ad-4beb-929c-b41d0b7c4601\") " pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.880140 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k76qm\" (UniqueName: \"kubernetes.io/projected/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-kube-api-access-k76qm\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.888359 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f502d82d-fe0c-40c0-aeba-1b50934fd13a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9st46\" (UID: \"f502d82d-fe0c-40c0-aeba-1b50934fd13a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.888423 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjdx2\" (UniqueName: \"kubernetes.io/projected/ddd2fe2c-4083-4793-81d8-20d7fc05fe3d-kube-api-access-mjdx2\") pod \"control-plane-machine-set-operator-78cbb6b69f-29f7l\" (UID: \"ddd2fe2c-4083-4793-81d8-20d7fc05fe3d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" Dec 01 18:32:13 
crc kubenswrapper[4935]: I1201 18:32:13.890879 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f502d82d-fe0c-40c0-aeba-1b50934fd13a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9st46\" (UID: \"f502d82d-fe0c-40c0-aeba-1b50934fd13a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.890930 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8pbp\" (UniqueName: \"kubernetes.io/projected/b337cc76-0681-41e9-9cdb-7c660ee29b84-kube-api-access-h8pbp\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.891358 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-stats-auth\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.942197 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r9pw7"] Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.990950 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv"] Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.992470 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:13 crc kubenswrapper[4935]: E1201 18:32:13.992750 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.492716088 +0000 UTC m=+148.514345347 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.992840 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/335915be-d5ad-4beb-929c-b41d0b7c4601-signing-cabundle\") pod \"service-ca-9c57cc56f-g5n2c\" (UID: \"335915be-d5ad-4beb-929c-b41d0b7c4601\") " pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.992876 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k76qm\" (UniqueName: \"kubernetes.io/projected/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-kube-api-access-k76qm\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.992942 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f502d82d-fe0c-40c0-aeba-1b50934fd13a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9st46\" (UID: \"f502d82d-fe0c-40c0-aeba-1b50934fd13a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.992965 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjdx2\" (UniqueName: \"kubernetes.io/projected/ddd2fe2c-4083-4793-81d8-20d7fc05fe3d-kube-api-access-mjdx2\") pod \"control-plane-machine-set-operator-78cbb6b69f-29f7l\" (UID: \"ddd2fe2c-4083-4793-81d8-20d7fc05fe3d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993011 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f502d82d-fe0c-40c0-aeba-1b50934fd13a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9st46\" (UID: \"f502d82d-fe0c-40c0-aeba-1b50934fd13a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993034 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8pbp\" (UniqueName: \"kubernetes.io/projected/b337cc76-0681-41e9-9cdb-7c660ee29b84-kube-api-access-h8pbp\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993057 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-stats-auth\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993083 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/adaacf02-1e93-42be-93cd-6b489b75e3a6-cert\") pod \"ingress-canary-sstkh\" (UID: \"adaacf02-1e93-42be-93cd-6b489b75e3a6\") " pod="openshift-ingress-canary/ingress-canary-sstkh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993112 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-mountpoint-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993166 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ca0a59-24a4-459e-a0d3-891d6ac90ed7-config\") pod \"kube-apiserver-operator-766d6c64bb-w9jp2\" (UID: \"72ca0a59-24a4-459e-a0d3-891d6ac90ed7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993685 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72ca0a59-24a4-459e-a0d3-891d6ac90ed7-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-w9jp2\" (UID: \"72ca0a59-24a4-459e-a0d3-891d6ac90ed7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993712 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-socket-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993737 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdhx6\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-kube-api-access-qdhx6\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993764 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45dvx\" (UniqueName: \"kubernetes.io/projected/4c6a6c13-28a0-446d-8446-130794e1ee21-kube-api-access-45dvx\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993789 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8bac674-5c61-4782-9f74-6374a430e7fc-config-volume\") pod \"collect-profiles-29410230-xt77b\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993829 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwqhh\" (UniqueName: \"kubernetes.io/projected/ca0af18b-162c-47f2-aa52-6bcd54a87a80-kube-api-access-nwqhh\") pod \"service-ca-operator-777779d784-x4vnj\" (UID: 
\"ca0af18b-162c-47f2-aa52-6bcd54a87a80\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993855 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2m9f\" (UniqueName: \"kubernetes.io/projected/625e7064-21f1-491d-bbc2-1d2b2faaa977-kube-api-access-k2m9f\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993891 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b337cc76-0681-41e9-9cdb-7c660ee29b84-serving-cert\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993912 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/38933f63-180c-4831-858b-2efc49cf634a-srv-cert\") pod \"catalog-operator-68c6474976-crm6z\" (UID: \"38933f63-180c-4831-858b-2efc49cf634a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993935 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72ca0a59-24a4-459e-a0d3-891d6ac90ed7-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-w9jp2\" (UID: \"72ca0a59-24a4-459e-a0d3-891d6ac90ed7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993968 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22-metrics-tls\") pod \"dns-operator-744455d44c-lsb2b\" (UID: \"5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22\") " pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.993993 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-csi-data-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994029 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e7437d18-a515-4690-8ccf-65b8540426a1-certs\") pod \"machine-config-server-6gzcq\" (UID: \"e7437d18-a515-4690-8ccf-65b8540426a1\") " pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994043 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/335915be-d5ad-4beb-929c-b41d0b7c4601-signing-cabundle\") pod \"service-ca-9c57cc56f-g5n2c\" (UID: \"335915be-d5ad-4beb-929c-b41d0b7c4601\") " pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994054 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" 
(UniqueName: \"kubernetes.io/secret/38933f63-180c-4831-858b-2efc49cf634a-profile-collector-cert\") pod \"catalog-operator-68c6474976-crm6z\" (UID: \"38933f63-180c-4831-858b-2efc49cf634a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994079 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-registration-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994100 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b337cc76-0681-41e9-9cdb-7c660ee29b84-trusted-ca\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994121 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p7vg\" (UniqueName: \"kubernetes.io/projected/71041089-3004-4e0b-990d-c6e471b38ea6-kube-api-access-6p7vg\") pod \"dns-default-kzzxw\" (UID: \"71041089-3004-4e0b-990d-c6e471b38ea6\") " pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994181 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18dc429c-7515-47a0-b008-b60a631a2723-trusted-ca\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994208 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-metrics-certs\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994226 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/625e7064-21f1-491d-bbc2-1d2b2faaa977-apiservice-cert\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994271 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-plugins-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994297 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k4g4f\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 
18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994324 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z94tv\" (UniqueName: \"kubernetes.io/projected/032f1651-2326-461b-897e-35303c17f32c-kube-api-access-z94tv\") pod \"multus-admission-controller-857f4d67dd-5tzwh\" (UID: \"032f1651-2326-461b-897e-35303c17f32c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994347 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86vfr\" (UniqueName: \"kubernetes.io/projected/b123b9f1-7d6b-496c-87c2-7790b027abd6-kube-api-access-86vfr\") pod \"marketplace-operator-79b997595-k4g4f\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994368 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/625e7064-21f1-491d-bbc2-1d2b2faaa977-webhook-cert\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994463 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7jdz\" (UniqueName: \"kubernetes.io/projected/38933f63-180c-4831-858b-2efc49cf634a-kube-api-access-q7jdz\") pod \"catalog-operator-68c6474976-crm6z\" (UID: \"38933f63-180c-4831-858b-2efc49cf634a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994506 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpgsc\" (UniqueName: \"kubernetes.io/projected/9f035d02-e865-4fc5-a759-372db1d4e910-kube-api-access-mpgsc\") pod \"olm-operator-6b444d44fb-48czs\" (UID: \"9f035d02-e865-4fc5-a759-372db1d4e910\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994531 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k4g4f\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994587 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994649 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46sgh\" (UniqueName: \"kubernetes.io/projected/6ccc1894-d6b6-454c-ab68-45e1ffbc9124-kube-api-access-46sgh\") pod \"openshift-config-operator-7777fb866f-px5jz\" (UID: \"6ccc1894-d6b6-454c-ab68-45e1ffbc9124\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994677 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shqdx\" (UniqueName: \"kubernetes.io/projected/d8bac674-5c61-4782-9f74-6374a430e7fc-kube-api-access-shqdx\") pod \"collect-profiles-29410230-xt77b\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994699 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4f81095d-3084-427f-8f0e-bdd180180c31-installation-pull-secrets\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994718 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-m95dz\" (UID: \"4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994741 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-registry-certificates\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994767 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ccc1894-d6b6-454c-ab68-45e1ffbc9124-serving-cert\") pod \"openshift-config-operator-7777fb866f-px5jz\" (UID: \"6ccc1894-d6b6-454c-ab68-45e1ffbc9124\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994795 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9f035d02-e865-4fc5-a759-372db1d4e910-profile-collector-cert\") pod \"olm-operator-6b444d44fb-48czs\" (UID: \"9f035d02-e865-4fc5-a759-372db1d4e910\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994824 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/40102f05-30f8-4552-8db6-140eced8121a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-wvsgh\" (UID: \"40102f05-30f8-4552-8db6-140eced8121a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994855 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/335915be-d5ad-4beb-929c-b41d0b7c4601-signing-key\") pod \"service-ca-9c57cc56f-g5n2c\" (UID: \"335915be-d5ad-4beb-929c-b41d0b7c4601\") " pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994884 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/40102f05-30f8-4552-8db6-140eced8121a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-wvsgh\" (UID: \"40102f05-30f8-4552-8db6-140eced8121a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994910 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/032f1651-2326-461b-897e-35303c17f32c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-5tzwh\" (UID: \"032f1651-2326-461b-897e-35303c17f32c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994936 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/18dc429c-7515-47a0-b008-b60a631a2723-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994958 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f502d82d-fe0c-40c0-aeba-1b50934fd13a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9st46\" (UID: \"f502d82d-fe0c-40c0-aeba-1b50934fd13a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.994982 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-default-certificate\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995011 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-registry-tls\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995036 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40102f05-30f8-4552-8db6-140eced8121a-config\") pod \"kube-controller-manager-operator-78b949d7b-wvsgh\" (UID: \"40102f05-30f8-4552-8db6-140eced8121a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995077 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/18dc429c-7515-47a0-b008-b60a631a2723-metrics-tls\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995101 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71041089-3004-4e0b-990d-c6e471b38ea6-config-volume\") pod \"dns-default-kzzxw\" (UID: 
\"71041089-3004-4e0b-990d-c6e471b38ea6\") " pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995159 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q9jw\" (UniqueName: \"kubernetes.io/projected/4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a-kube-api-access-2q9jw\") pod \"package-server-manager-789f6589d5-m95dz\" (UID: \"4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995202 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv5hr\" (UniqueName: \"kubernetes.io/projected/5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22-kube-api-access-bv5hr\") pod \"dns-operator-744455d44c-lsb2b\" (UID: \"5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22\") " pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995229 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca0af18b-162c-47f2-aa52-6bcd54a87a80-serving-cert\") pod \"service-ca-operator-777779d784-x4vnj\" (UID: \"ca0af18b-162c-47f2-aa52-6bcd54a87a80\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995253 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48vjt\" (UniqueName: \"kubernetes.io/projected/335915be-d5ad-4beb-929c-b41d0b7c4601-kube-api-access-48vjt\") pod \"service-ca-9c57cc56f-g5n2c\" (UID: \"335915be-d5ad-4beb-929c-b41d0b7c4601\") " pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995286 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t6rx\" (UniqueName: \"kubernetes.io/projected/18dc429c-7515-47a0-b008-b60a631a2723-kube-api-access-6t6rx\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995321 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ddd2fe2c-4083-4793-81d8-20d7fc05fe3d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-29f7l\" (UID: \"ddd2fe2c-4083-4793-81d8-20d7fc05fe3d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995348 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-service-ca-bundle\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995374 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9f035d02-e865-4fc5-a759-372db1d4e910-srv-cert\") pod \"olm-operator-6b444d44fb-48czs\" (UID: \"9f035d02-e865-4fc5-a759-372db1d4e910\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:13 crc kubenswrapper[4935]: 
I1201 18:32:13.995399 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4f81095d-3084-427f-8f0e-bdd180180c31-ca-trust-extracted\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995423 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6ccc1894-d6b6-454c-ab68-45e1ffbc9124-available-featuregates\") pod \"openshift-config-operator-7777fb866f-px5jz\" (UID: \"6ccc1894-d6b6-454c-ab68-45e1ffbc9124\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995448 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td4sg\" (UniqueName: \"kubernetes.io/projected/4c9a288d-97d2-4f8d-8e74-fb4913ee6627-kube-api-access-td4sg\") pod \"downloads-7954f5f757-pphhc\" (UID: \"4c9a288d-97d2-4f8d-8e74-fb4913ee6627\") " pod="openshift-console/downloads-7954f5f757-pphhc" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995488 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b337cc76-0681-41e9-9cdb-7c660ee29b84-config\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995516 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnwpq\" (UniqueName: \"kubernetes.io/projected/adaacf02-1e93-42be-93cd-6b489b75e3a6-kube-api-access-pnwpq\") pod \"ingress-canary-sstkh\" (UID: \"adaacf02-1e93-42be-93cd-6b489b75e3a6\") " pod="openshift-ingress-canary/ingress-canary-sstkh" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995545 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t645p\" (UniqueName: \"kubernetes.io/projected/e7437d18-a515-4690-8ccf-65b8540426a1-kube-api-access-t645p\") pod \"machine-config-server-6gzcq\" (UID: \"e7437d18-a515-4690-8ccf-65b8540426a1\") " pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995573 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8bac674-5c61-4782-9f74-6374a430e7fc-secret-volume\") pod \"collect-profiles-29410230-xt77b\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995597 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e7437d18-a515-4690-8ccf-65b8540426a1-node-bootstrap-token\") pod \"machine-config-server-6gzcq\" (UID: \"e7437d18-a515-4690-8ccf-65b8540426a1\") " pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995620 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/625e7064-21f1-491d-bbc2-1d2b2faaa977-tmpfs\") 
pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995644 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-bound-sa-token\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995671 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca0af18b-162c-47f2-aa52-6bcd54a87a80-config\") pod \"service-ca-operator-777779d784-x4vnj\" (UID: \"ca0af18b-162c-47f2-aa52-6bcd54a87a80\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995736 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/71041089-3004-4e0b-990d-c6e471b38ea6-metrics-tls\") pod \"dns-default-kzzxw\" (UID: \"71041089-3004-4e0b-990d-c6e471b38ea6\") " pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.995761 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-trusted-ca\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:13 crc kubenswrapper[4935]: I1201 18:32:13.996872 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-trusted-ca\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:13.999206 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.499188449 +0000 UTC m=+148.520817708 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.000437 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k4g4f\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.001563 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b337cc76-0681-41e9-9cdb-7c660ee29b84-trusted-ca\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.002950 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18dc429c-7515-47a0-b008-b60a631a2723-trusted-ca\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.008001 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f502d82d-fe0c-40c0-aeba-1b50934fd13a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9st46\" (UID: \"f502d82d-fe0c-40c0-aeba-1b50934fd13a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.013319 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8bac674-5c61-4782-9f74-6374a430e7fc-config-volume\") pod \"collect-profiles-29410230-xt77b\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.014967 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/625e7064-21f1-491d-bbc2-1d2b2faaa977-apiservice-cert\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.016882 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/625e7064-21f1-491d-bbc2-1d2b2faaa977-webhook-cert\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.017648 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/e7437d18-a515-4690-8ccf-65b8540426a1-certs\") pod \"machine-config-server-6gzcq\" (UID: \"e7437d18-a515-4690-8ccf-65b8540426a1\") " pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.018683 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4f81095d-3084-427f-8f0e-bdd180180c31-installation-pull-secrets\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.019084 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72ca0a59-24a4-459e-a0d3-891d6ac90ed7-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-w9jp2\" (UID: \"72ca0a59-24a4-459e-a0d3-891d6ac90ed7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.019333 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ca0a59-24a4-459e-a0d3-891d6ac90ed7-config\") pod \"kube-apiserver-operator-766d6c64bb-w9jp2\" (UID: \"72ca0a59-24a4-459e-a0d3-891d6ac90ed7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.023611 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.026177 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-service-ca-bundle\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.026878 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-stats-auth\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.034929 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-registry-certificates\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.035223 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/335915be-d5ad-4beb-929c-b41d0b7c4601-signing-key\") pod \"service-ca-9c57cc56f-g5n2c\" (UID: \"335915be-d5ad-4beb-929c-b41d0b7c4601\") " pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.035252 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4f81095d-3084-427f-8f0e-bdd180180c31-ca-trust-extracted\") 
pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.036651 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b337cc76-0681-41e9-9cdb-7c660ee29b84-serving-cert\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.036927 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9f035d02-e865-4fc5-a759-372db1d4e910-profile-collector-cert\") pod \"olm-operator-6b444d44fb-48czs\" (UID: \"9f035d02-e865-4fc5-a759-372db1d4e910\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.037111 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/71041089-3004-4e0b-990d-c6e471b38ea6-metrics-tls\") pod \"dns-default-kzzxw\" (UID: \"71041089-3004-4e0b-990d-c6e471b38ea6\") " pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:14 crc kubenswrapper[4935]: W1201 18:32:14.037584 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77a79ecb_4a46_43f2_9187_7cd6fc3dc641.slice/crio-5c93b8b382da0d1112b5b214266c410db179adf49471f9acfba51e309f341058 WatchSource:0}: Error finding container 5c93b8b382da0d1112b5b214266c410db179adf49471f9acfba51e309f341058: Status 404 returned error can't find the container with id 5c93b8b382da0d1112b5b214266c410db179adf49471f9acfba51e309f341058 Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.039314 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b337cc76-0681-41e9-9cdb-7c660ee29b84-config\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.039398 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6ccc1894-d6b6-454c-ab68-45e1ffbc9124-available-featuregates\") pod \"openshift-config-operator-7777fb866f-px5jz\" (UID: \"6ccc1894-d6b6-454c-ab68-45e1ffbc9124\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.039852 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/38933f63-180c-4831-858b-2efc49cf634a-profile-collector-cert\") pod \"catalog-operator-68c6474976-crm6z\" (UID: \"38933f63-180c-4831-858b-2efc49cf634a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.039940 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ccc1894-d6b6-454c-ab68-45e1ffbc9124-serving-cert\") pod \"openshift-config-operator-7777fb866f-px5jz\" (UID: \"6ccc1894-d6b6-454c-ab68-45e1ffbc9124\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" 
Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.040824 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/625e7064-21f1-491d-bbc2-1d2b2faaa977-tmpfs\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.041974 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k4g4f\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.042849 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f502d82d-fe0c-40c0-aeba-1b50934fd13a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9st46\" (UID: \"f502d82d-fe0c-40c0-aeba-1b50934fd13a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.043969 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40102f05-30f8-4552-8db6-140eced8121a-config\") pod \"kube-controller-manager-operator-78b949d7b-wvsgh\" (UID: \"40102f05-30f8-4552-8db6-140eced8121a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.045463 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ddd2fe2c-4083-4793-81d8-20d7fc05fe3d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-29f7l\" (UID: \"ddd2fe2c-4083-4793-81d8-20d7fc05fe3d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.045958 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-metrics-certs\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.046230 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.047892 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/032f1651-2326-461b-897e-35303c17f32c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-5tzwh\" (UID: \"032f1651-2326-461b-897e-35303c17f32c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.048594 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca0af18b-162c-47f2-aa52-6bcd54a87a80-serving-cert\") pod \"service-ca-operator-777779d784-x4vnj\" (UID: \"ca0af18b-162c-47f2-aa52-6bcd54a87a80\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.048678 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca0af18b-162c-47f2-aa52-6bcd54a87a80-config\") pod \"service-ca-operator-777779d784-x4vnj\" (UID: \"ca0af18b-162c-47f2-aa52-6bcd54a87a80\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.054702 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-registry-tls\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.062413 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9f035d02-e865-4fc5-a759-372db1d4e910-srv-cert\") pod \"olm-operator-6b444d44fb-48czs\" (UID: \"9f035d02-e865-4fc5-a759-372db1d4e910\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.062922 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f502d82d-fe0c-40c0-aeba-1b50934fd13a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9st46\" (UID: \"f502d82d-fe0c-40c0-aeba-1b50934fd13a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.063848 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40102f05-30f8-4552-8db6-140eced8121a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-wvsgh\" (UID: \"40102f05-30f8-4552-8db6-140eced8121a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.064483 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22-metrics-tls\") pod \"dns-operator-744455d44c-lsb2b\" (UID: \"5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22\") " pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.067362 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/18dc429c-7515-47a0-b008-b60a631a2723-metrics-tls\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.067526 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k76qm\" (UniqueName: \"kubernetes.io/projected/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-kube-api-access-k76qm\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.067658 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-m95dz\" (UID: \"4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.068292 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8bac674-5c61-4782-9f74-6374a430e7fc-secret-volume\") pod \"collect-profiles-29410230-xt77b\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.070088 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/38933f63-180c-4831-858b-2efc49cf634a-srv-cert\") pod \"catalog-operator-68c6474976-crm6z\" (UID: \"38933f63-180c-4831-858b-2efc49cf634a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.070745 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c0465c3c-119c-4e8a-8bb7-697b2690b1bf-default-certificate\") pod \"router-default-5444994796-p95fx\" (UID: \"c0465c3c-119c-4e8a-8bb7-697b2690b1bf\") " pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.071242 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71041089-3004-4e0b-990d-c6e471b38ea6-config-volume\") pod \"dns-default-kzzxw\" (UID: \"71041089-3004-4e0b-990d-c6e471b38ea6\") " pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.080162 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e7437d18-a515-4690-8ccf-65b8540426a1-node-bootstrap-token\") pod \"machine-config-server-6gzcq\" (UID: \"e7437d18-a515-4690-8ccf-65b8540426a1\") " pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.094168 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjdx2\" (UniqueName: \"kubernetes.io/projected/ddd2fe2c-4083-4793-81d8-20d7fc05fe3d-kube-api-access-mjdx2\") pod \"control-plane-machine-set-operator-78cbb6b69f-29f7l\" (UID: \"ddd2fe2c-4083-4793-81d8-20d7fc05fe3d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" 
Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.097292 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.097545 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.597514808 +0000 UTC m=+148.619144067 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.098009 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnwpq\" (UniqueName: \"kubernetes.io/projected/adaacf02-1e93-42be-93cd-6b489b75e3a6-kube-api-access-pnwpq\") pod \"ingress-canary-sstkh\" (UID: \"adaacf02-1e93-42be-93cd-6b489b75e3a6\") " pod="openshift-ingress-canary/ingress-canary-sstkh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.098198 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/adaacf02-1e93-42be-93cd-6b489b75e3a6-cert\") pod \"ingress-canary-sstkh\" (UID: \"adaacf02-1e93-42be-93cd-6b489b75e3a6\") " pod="openshift-ingress-canary/ingress-canary-sstkh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.098309 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-mountpoint-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.098425 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-socket-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.098562 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45dvx\" (UniqueName: \"kubernetes.io/projected/4c6a6c13-28a0-446d-8446-130794e1ee21-kube-api-access-45dvx\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.098692 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-csi-data-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " 
pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.098791 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-registration-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.098899 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-plugins-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.099026 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.099622 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.599611603 +0000 UTC m=+148.621240862 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.101532 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-socket-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.102292 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-mountpoint-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.102363 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-registration-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.102533 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-csi-data-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: 
\"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.102562 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4c6a6c13-28a0-446d-8446-130794e1ee21-plugins-dir\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.113495 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/adaacf02-1e93-42be-93cd-6b489b75e3a6-cert\") pod \"ingress-canary-sstkh\" (UID: \"adaacf02-1e93-42be-93cd-6b489b75e3a6\") " pod="openshift-ingress-canary/ingress-canary-sstkh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.115194 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.125432 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shqdx\" (UniqueName: \"kubernetes.io/projected/d8bac674-5c61-4782-9f74-6374a430e7fc-kube-api-access-shqdx\") pod \"collect-profiles-29410230-xt77b\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.131414 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46sgh\" (UniqueName: \"kubernetes.io/projected/6ccc1894-d6b6-454c-ab68-45e1ffbc9124-kube-api-access-46sgh\") pod \"openshift-config-operator-7777fb866f-px5jz\" (UID: \"6ccc1894-d6b6-454c-ab68-45e1ffbc9124\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.134674 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.135307 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z94tv\" (UniqueName: \"kubernetes.io/projected/032f1651-2326-461b-897e-35303c17f32c-kube-api-access-z94tv\") pod \"multus-admission-controller-857f4d67dd-5tzwh\" (UID: \"032f1651-2326-461b-897e-35303c17f32c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.150657 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.160248 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86vfr\" (UniqueName: \"kubernetes.io/projected/b123b9f1-7d6b-496c-87c2-7790b027abd6-kube-api-access-86vfr\") pod \"marketplace-operator-79b997595-k4g4f\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.163943 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.171460 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.179109 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8pbp\" (UniqueName: \"kubernetes.io/projected/b337cc76-0681-41e9-9cdb-7c660ee29b84-kube-api-access-h8pbp\") pod \"console-operator-58897d9998-944sj\" (UID: \"b337cc76-0681-41e9-9cdb-7c660ee29b84\") " pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.200451 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.200893 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p7vg\" (UniqueName: \"kubernetes.io/projected/71041089-3004-4e0b-990d-c6e471b38ea6-kube-api-access-6p7vg\") pod \"dns-default-kzzxw\" (UID: \"71041089-3004-4e0b-990d-c6e471b38ea6\") " pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.200779 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.70075481 +0000 UTC m=+148.722384069 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.201332 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.201824 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.701815053 +0000 UTC m=+148.723444312 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.233726 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.236054 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48vjt\" (UniqueName: \"kubernetes.io/projected/335915be-d5ad-4beb-929c-b41d0b7c4601-kube-api-access-48vjt\") pod \"service-ca-9c57cc56f-g5n2c\" (UID: \"335915be-d5ad-4beb-929c-b41d0b7c4601\") " pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.243847 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2m9f\" (UniqueName: \"kubernetes.io/projected/625e7064-21f1-491d-bbc2-1d2b2faaa977-kube-api-access-k2m9f\") pod \"packageserver-d55dfcdfc-ccph8\" (UID: \"625e7064-21f1-491d-bbc2-1d2b2faaa977\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.271801 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwqhh\" (UniqueName: \"kubernetes.io/projected/ca0af18b-162c-47f2-aa52-6bcd54a87a80-kube-api-access-nwqhh\") pod \"service-ca-operator-777779d784-x4vnj\" (UID: \"ca0af18b-162c-47f2-aa52-6bcd54a87a80\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.278601 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.285458 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t6rx\" (UniqueName: \"kubernetes.io/projected/18dc429c-7515-47a0-b008-b60a631a2723-kube-api-access-6t6rx\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.285630 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.301134 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7jdz\" (UniqueName: \"kubernetes.io/projected/38933f63-180c-4831-858b-2efc49cf634a-kube-api-access-q7jdz\") pod \"catalog-operator-68c6474976-crm6z\" (UID: \"38933f63-180c-4831-858b-2efc49cf634a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.304045 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.304887 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.804858998 +0000 UTC m=+148.826488257 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.306885 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.308271 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.808250224 +0000 UTC m=+148.829879483 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.308886 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.328061 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.351513 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpgsc\" (UniqueName: \"kubernetes.io/projected/9f035d02-e865-4fc5-a759-372db1d4e910-kube-api-access-mpgsc\") pod \"olm-operator-6b444d44fb-48czs\" (UID: \"9f035d02-e865-4fc5-a759-372db1d4e910\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.353960 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72ca0a59-24a4-459e-a0d3-891d6ac90ed7-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-w9jp2\" (UID: \"72ca0a59-24a4-459e-a0d3-891d6ac90ed7\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.367452 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdhx6\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-kube-api-access-qdhx6\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.384880 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-2dvf9"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.406760 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.410068 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.410650 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:14.910623468 +0000 UTC m=+148.932252727 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.414845 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-bound-sa-token\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.416112 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t645p\" (UniqueName: \"kubernetes.io/projected/e7437d18-a515-4690-8ccf-65b8540426a1-kube-api-access-t645p\") pod \"machine-config-server-6gzcq\" (UID: \"e7437d18-a515-4690-8ccf-65b8540426a1\") " pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.438030 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/40102f05-30f8-4552-8db6-140eced8121a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-wvsgh\" (UID: \"40102f05-30f8-4552-8db6-140eced8121a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.448007 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" event={"ID":"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f","Type":"ContainerStarted","Data":"ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.448058 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" event={"ID":"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f","Type":"ContainerStarted","Data":"a031f88e4ac011a643bcc027f3955b0d47df904ffceb548eb0c88e0e4df0c1ad"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.449570 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.454106 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.456523 4935 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-4dmcv container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.456629 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" podUID="d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.458865 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-p95fx" event={"ID":"c0465c3c-119c-4e8a-8bb7-697b2690b1bf","Type":"ContainerStarted","Data":"64a08d54c1eda3fb074b3eb428ad8004fe1ff5a7de7d9a5b67f94f32f1f2837f"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.460952 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td4sg\" (UniqueName: \"kubernetes.io/projected/4c9a288d-97d2-4f8d-8e74-fb4913ee6627-kube-api-access-td4sg\") pod \"downloads-7954f5f757-pphhc\" (UID: \"4c9a288d-97d2-4f8d-8e74-fb4913ee6627\") " pod="openshift-console/downloads-7954f5f757-pphhc" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.483693 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" event={"ID":"7dc60e7d-1c00-4801-bddb-27852f80fb7e","Type":"ContainerStarted","Data":"969291f69c0d4d54d971ed19d9b2de4564f35ac50d69446e7854c5501bad1a90"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.487590 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.489343 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q9jw\" (UniqueName: \"kubernetes.io/projected/4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a-kube-api-access-2q9jw\") pod \"package-server-manager-789f6589d5-m95dz\" (UID: \"4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.495445 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.499321 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-pphhc" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.505823 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.512333 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.512995 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.012981993 +0000 UTC m=+149.034611252 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.514962 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.529050 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.573966 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/18dc429c-7515-47a0-b008-b60a631a2723-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zfbpg\" (UID: \"18dc429c-7515-47a0-b008-b60a631a2723\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.576677 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv5hr\" (UniqueName: \"kubernetes.io/projected/5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22-kube-api-access-bv5hr\") pod \"dns-operator-744455d44c-lsb2b\" (UID: \"5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22\") " pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.581601 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.584893 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.586690 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.594294 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45dvx\" (UniqueName: \"kubernetes.io/projected/4c6a6c13-28a0-446d-8446-130794e1ee21-kube-api-access-45dvx\") pod \"csi-hostpathplugin-rrv4j\" (UID: \"4c6a6c13-28a0-446d-8446-130794e1ee21\") " pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.597733 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6gzcq" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.615576 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.617209 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.117187124 +0000 UTC m=+149.138816383 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.622881 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnwpq\" (UniqueName: \"kubernetes.io/projected/adaacf02-1e93-42be-93cd-6b489b75e3a6-kube-api-access-pnwpq\") pod \"ingress-canary-sstkh\" (UID: \"adaacf02-1e93-42be-93cd-6b489b75e3a6\") " pod="openshift-ingress-canary/ingress-canary-sstkh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.634998 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ce1bfd0ff6173294f8658dc371d6166b7133652bcf54b4ad24b3c887e5cf5958"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.635047 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" event={"ID":"a7fff11c-c10d-4ac5-b353-75fc431ab510","Type":"ContainerStarted","Data":"a82fa28064d7a303f0886f103088f07088ce6b9276bb97e0458dd45f91a93872"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.635063 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.635091 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.649669 4935 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" event={"ID":"77a79ecb-4a46-43f2-9187-7cd6fc3dc641","Type":"ContainerStarted","Data":"85ec1f91e2bf90738b2a574415f7fadc6b7bc29a1c1fb8ba829d61e000b0364c"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.649859 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" event={"ID":"77a79ecb-4a46-43f2-9187-7cd6fc3dc641","Type":"ContainerStarted","Data":"5c93b8b382da0d1112b5b214266c410db179adf49471f9acfba51e309f341058"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.656748 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.699008 4935 generic.go:334] "Generic (PLEG): container finished" podID="efebdc19-5ace-480b-8151-51e2ea78b4e8" containerID="aa5621d73de47e5d50d476509aff9c31518d04c22767342d9a71197a5ac3cb10" exitCode=0 Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.699173 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" event={"ID":"efebdc19-5ace-480b-8151-51e2ea78b4e8","Type":"ContainerDied","Data":"aa5621d73de47e5d50d476509aff9c31518d04c22767342d9a71197a5ac3cb10"} Dec 01 18:32:14 crc kubenswrapper[4935]: W1201 18:32:14.706277 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2145f358_3d68_4239_9267_cfe321b24ec3.slice/crio-be852bcf06b984f3a6a2b3591aa482ff74db1e057d76ccba0e7cf263473e6f66 WatchSource:0}: Error finding container be852bcf06b984f3a6a2b3591aa482ff74db1e057d76ccba0e7cf263473e6f66: Status 404 returned error can't find the container with id be852bcf06b984f3a6a2b3591aa482ff74db1e057d76ccba0e7cf263473e6f66 Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.727010 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.727796 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.729610 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.22958191 +0000 UTC m=+149.251211159 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.730031 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.739577 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.785408 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp" event={"ID":"6d5a0929-fc79-4de8-98b7-d5238c625373","Type":"ContainerStarted","Data":"345ce4d8ff32cc0d72384feaebff5a4350db954473f2b47eadca7e2570d2e13e"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.792442 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.794872 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"753908ec335256d43fc5981db6bdafbba501a877c9aa9e808fdfe4b820f8a2c3"} Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.823645 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.829092 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.836650 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.33662971 +0000 UTC m=+149.358258969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.838216 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.851489 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.909608 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7kn22"] Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.922140 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-sstkh" Dec 01 18:32:14 crc kubenswrapper[4935]: I1201 18:32:14.932598 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:14 crc kubenswrapper[4935]: E1201 18:32:14.938200 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.438178949 +0000 UTC m=+149.459808208 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:14 crc kubenswrapper[4935]: W1201 18:32:14.939035 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf502d82d_fe0c_40c0_aeba_1b50934fd13a.slice/crio-1a4b797a6b31d2f1fc888db631b12e9801fd88e4c867f33a6ded883edac50be0 WatchSource:0}: Error finding container 1a4b797a6b31d2f1fc888db631b12e9801fd88e4c867f33a6ded883edac50be0: Status 404 returned error can't find the container with id 1a4b797a6b31d2f1fc888db631b12e9801fd88e4c867f33a6ded883edac50be0 Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.036521 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.036807 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.536778777 +0000 UTC m=+149.558408036 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.037288 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.037649 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.537633193 +0000 UTC m=+149.559262452 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.087959 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l"] Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.141868 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.142352 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.64233441 +0000 UTC m=+149.663963669 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.149161 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k4g4f"] Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.177178 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-px5jz"] Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.178740 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" podStartSLOduration=124.178712668 podStartE2EDuration="2m4.178712668s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:15.171738952 +0000 UTC m=+149.193368211" watchObservedRunningTime="2025-12-01 18:32:15.178712668 +0000 UTC m=+149.200341927" Dec 01 18:32:15 crc kubenswrapper[4935]: W1201 18:32:15.217640 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddd2fe2c_4083_4793_81d8_20d7fc05fe3d.slice/crio-50537d2048b84fb70447f0dc6bee7cbe0032a6c39cdfdc2e663c8ff474e5445c WatchSource:0}: Error finding container 50537d2048b84fb70447f0dc6bee7cbe0032a6c39cdfdc2e663c8ff474e5445c: Status 404 returned error can't find the container with id 50537d2048b84fb70447f0dc6bee7cbe0032a6c39cdfdc2e663c8ff474e5445c Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.243463 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.244092 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.744073635 +0000 UTC m=+149.765702884 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: W1201 18:32:15.263433 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb123b9f1_7d6b_496c_87c2_7790b027abd6.slice/crio-eb943c88be27933d28775702f2dca11493ed59035987cd433373722cde2152b3 WatchSource:0}: Error finding container eb943c88be27933d28775702f2dca11493ed59035987cd433373722cde2152b3: Status 404 returned error can't find the container with id eb943c88be27933d28775702f2dca11493ed59035987cd433373722cde2152b3 Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.320131 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" podStartSLOduration=124.320077972 podStartE2EDuration="2m4.320077972s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:15.309594057 +0000 UTC m=+149.331223316" watchObservedRunningTime="2025-12-01 18:32:15.320077972 +0000 UTC m=+149.341707251" Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.346672 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.347926 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.847876664 +0000 UTC m=+149.869505923 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.375361 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-kzzxw"] Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.390659 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-5tzwh"] Dec 01 18:32:15 crc kubenswrapper[4935]: W1201 18:32:15.391740 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ccc1894_d6b6_454c_ab68_45e1ffbc9124.slice/crio-5e4f6af1bf31ae9d0a7048840966cbc8378bff643344533f996c14eab76a4c1e WatchSource:0}: Error finding container 5e4f6af1bf31ae9d0a7048840966cbc8378bff643344533f996c14eab76a4c1e: Status 404 returned error can't find the container with id 5e4f6af1bf31ae9d0a7048840966cbc8378bff643344533f996c14eab76a4c1e Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.435550 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh"] Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.449553 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.449989 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:15.949975631 +0000 UTC m=+149.971604890 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.512375 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" podStartSLOduration=124.512348425 podStartE2EDuration="2m4.512348425s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:15.508665241 +0000 UTC m=+149.530294500" watchObservedRunningTime="2025-12-01 18:32:15.512348425 +0000 UTC m=+149.533977684" Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.555011 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.555113 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.055086 +0000 UTC m=+150.076715259 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.555927 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.556399 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.056379641 +0000 UTC m=+150.078008900 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.558382 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b"] Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.656934 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.657604 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.157578729 +0000 UTC m=+150.179207988 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.692627 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-pqvcq" podStartSLOduration=124.692600835 podStartE2EDuration="2m4.692600835s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:15.69052251 +0000 UTC m=+149.712151999" watchObservedRunningTime="2025-12-01 18:32:15.692600835 +0000 UTC m=+149.714230094" Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.761349 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.761855 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.261834692 +0000 UTC m=+150.283463951 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.812632 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" event={"ID":"a7fff11c-c10d-4ac5-b353-75fc431ab510","Type":"ContainerStarted","Data":"6075d6972793a4f8bc78e97756a22149d3653c8939b4e629ad1c3fa468b75b46"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.841581 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg"] Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.857917 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" event={"ID":"77a79ecb-4a46-43f2-9187-7cd6fc3dc641","Type":"ContainerStarted","Data":"9f8c63a2cde6eaaaaba48680187fe63b54b312c313f4a03d3dcfa325a2a194c8"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.860673 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" event={"ID":"f502d82d-fe0c-40c0-aeba-1b50934fd13a","Type":"ContainerStarted","Data":"1a4b797a6b31d2f1fc888db631b12e9801fd88e4c867f33a6ded883edac50be0"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.862002 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.862842 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.362810253 +0000 UTC m=+150.384439582 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.868696 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" event={"ID":"d11cf478-7ab4-47c0-aada-7b470f927f7f","Type":"ContainerStarted","Data":"3005d5840c2cffff4c4e8eeed65cb35fd4ee11ac48b4acb752fc890f438f5734"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.874280 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" event={"ID":"ddd2fe2c-4083-4793-81d8-20d7fc05fe3d","Type":"ContainerStarted","Data":"50537d2048b84fb70447f0dc6bee7cbe0032a6c39cdfdc2e663c8ff474e5445c"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.875262 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" event={"ID":"535dd9f1-99c2-430e-82ff-0a148a0331e7","Type":"ContainerStarted","Data":"90bd01dc2c7513bdd6953cfe1e752da135b6a5e4387c3a29fab5f2f367605bba"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.875998 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-kzzxw" event={"ID":"71041089-3004-4e0b-990d-c6e471b38ea6","Type":"ContainerStarted","Data":"29fa686a472a363e307800be7868b55f37131044821571bf2eb63b1867c41fe4"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.878512 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" event={"ID":"2b90e933-e7fe-4bfb-a2cc-47de5c67d631","Type":"ContainerStarted","Data":"45f47fe12306e97f87f63760f96c6b8f0f1d79a28a613bc4f53144659b95beaa"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.888323 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-pphhc"] Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.904831 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" event={"ID":"3ed2bb90-62d7-4254-8f3d-d744b8edce46","Type":"ContainerStarted","Data":"9c26d3e2a27850f7ada60b6c5e72871bbf4c07a3436da93a89aa2363214c8ddf"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.908059 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" event={"ID":"6ccc1894-d6b6-454c-ab68-45e1ffbc9124","Type":"ContainerStarted","Data":"5e4f6af1bf31ae9d0a7048840966cbc8378bff643344533f996c14eab76a4c1e"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.915506 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" event={"ID":"2145f358-3d68-4239-9267-cfe321b24ec3","Type":"ContainerStarted","Data":"be852bcf06b984f3a6a2b3591aa482ff74db1e057d76ccba0e7cf263473e6f66"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.923678 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" event={"ID":"94cfb221-c0cb-4979-bab8-ce0124fb0470","Type":"ContainerStarted","Data":"4805db19e0b2e8f67208d5ce9b20cee1e97e34c094743b394dde2913b2988c4d"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.951554 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp" event={"ID":"6d5a0929-fc79-4de8-98b7-d5238c625373","Type":"ContainerStarted","Data":"aa5baa86be647043cfbc1ba8f798350336498315c229a5d7f0458519a8b5b898"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.952943 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" event={"ID":"b123b9f1-7d6b-496c-87c2-7790b027abd6","Type":"ContainerStarted","Data":"eb943c88be27933d28775702f2dca11493ed59035987cd433373722cde2152b3"} Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.966470 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:15 crc kubenswrapper[4935]: E1201 18:32:15.966811 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.466798668 +0000 UTC m=+150.488427927 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:15 crc kubenswrapper[4935]: I1201 18:32:15.984464 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-526sq" podStartSLOduration=124.984431385 podStartE2EDuration="2m4.984431385s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:15.979315337 +0000 UTC m=+150.000944596" watchObservedRunningTime="2025-12-01 18:32:15.984431385 +0000 UTC m=+150.006060644" Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.005665 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-944sj"] Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.012795 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" event={"ID":"d8bac674-5c61-4782-9f74-6374a430e7fc","Type":"ContainerStarted","Data":"f68090cc44c006b769f774fe160f214f715a0b49c30a352983e3d7ada3f234d6"} Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.022246 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6gzcq" 
event={"ID":"e7437d18-a515-4690-8ccf-65b8540426a1","Type":"ContainerStarted","Data":"95fa014a688be3f2e14f99a43dfed65cc98a1e8636e9e5aa52b56e300536f5ef"} Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.059990 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-p95fx" event={"ID":"c0465c3c-119c-4e8a-8bb7-697b2690b1bf","Type":"ContainerStarted","Data":"2034dee6e4d455637af74990ee1795efc40b2899f4688efa5b9ecaa84fae077b"} Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.068880 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.069258 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.569241026 +0000 UTC m=+150.590870285 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.080437 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" event={"ID":"032f1651-2326-461b-897e-35303c17f32c","Type":"ContainerStarted","Data":"a7045756869ac0dcaf1c0ff8cf7856d73e98e559b5c41b8f1a5779b5c518d3a2"} Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.102256 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" event={"ID":"65a97abc-c92c-4b07-8922-dace15327fb1","Type":"ContainerStarted","Data":"e8411d6ad6915eb36e59ded4258c4f64a67563b3d47971f085fb97f7e2d0b6c7"} Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.117955 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.172859 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.184507 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.684451099 +0000 UTC m=+150.706080358 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.217899 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.242644 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:16 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:16 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:16 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.242718 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.273894 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.274495 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.77447701 +0000 UTC m=+150.796106269 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.333544 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-86226" podStartSLOduration=125.333515931 podStartE2EDuration="2m5.333515931s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:16.331622072 +0000 UTC m=+150.353251331" watchObservedRunningTime="2025-12-01 18:32:16.333515931 +0000 UTC m=+150.355145190" Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.385722 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.386228 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.886209605 +0000 UTC m=+150.907838864 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.459431 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-dbvg7" podStartSLOduration=125.459405845 podStartE2EDuration="2m5.459405845s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:16.430983703 +0000 UTC m=+150.452612962" watchObservedRunningTime="2025-12-01 18:32:16.459405845 +0000 UTC m=+150.481035104" Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.490602 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.491197 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.991124479 +0000 UTC m=+151.012753738 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.491449 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.491879 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:16.991868632 +0000 UTC m=+151.013497891 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.560608 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5j26w" podStartSLOduration=125.560586693 podStartE2EDuration="2m5.560586693s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:16.536702283 +0000 UTC m=+150.558331542" watchObservedRunningTime="2025-12-01 18:32:16.560586693 +0000 UTC m=+150.582215952" Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.592793 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.594843 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-r9pw7" podStartSLOduration=125.59481426400001 podStartE2EDuration="2m5.594814264s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:16.566429474 +0000 UTC m=+150.588058743" watchObservedRunningTime="2025-12-01 18:32:16.594814264 +0000 UTC m=+150.616443523" Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.601580 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.101542954 +0000 UTC m=+151.123172223 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.634982 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-p95fx" podStartSLOduration=125.634942749 podStartE2EDuration="2m5.634942749s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:16.608284712 +0000 UTC m=+150.629913971" watchObservedRunningTime="2025-12-01 18:32:16.634942749 +0000 UTC m=+150.656571998" Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.698412 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.698834 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.19880614 +0000 UTC m=+151.220435399 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.728264 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8"] Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.807762 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.808415 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.308392298 +0000 UTC m=+151.330021557 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:16 crc kubenswrapper[4935]: I1201 18:32:16.909739 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:16 crc kubenswrapper[4935]: E1201 18:32:16.910128 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.410112022 +0000 UTC m=+151.431741291 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.010816 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.011141 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.511095124 +0000 UTC m=+151.532724473 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.011454 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.011814 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.511798226 +0000 UTC m=+151.533427485 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.095716 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2"] Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.112564 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.112983 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.612963163 +0000 UTC m=+151.634592422 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.115402 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" event={"ID":"94cfb221-c0cb-4979-bab8-ce0124fb0470","Type":"ContainerStarted","Data":"f20a45aa4b21b699d14ccc8f6e4ffed85186ab114411758d7a9a2bd327b3bdd9"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.123878 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:17 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:17 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:17 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.123926 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.143591 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" event={"ID":"d11cf478-7ab4-47c0-aada-7b470f927f7f","Type":"ContainerStarted","Data":"5efdde1168b2d76ba33bc79cdd430b60a727d9dd5c66d82070f11f5cb7430555"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.171135 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-hl8t7" podStartSLOduration=126.171118347 podStartE2EDuration="2m6.171118347s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:17.17090221 +0000 UTC m=+151.192531469" watchObservedRunningTime="2025-12-01 18:32:17.171118347 +0000 UTC m=+151.192747606" Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.187822 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" event={"ID":"3ed2bb90-62d7-4254-8f3d-d744b8edce46","Type":"ContainerStarted","Data":"84d2adf09c51e7e3befe302123140754058c63a0d1fdc663fe6c6fa3dfc35b32"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.213752 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nnx69" podStartSLOduration=126.213724338 podStartE2EDuration="2m6.213724338s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:17.207541896 +0000 UTC m=+151.229171165" watchObservedRunningTime="2025-12-01 18:32:17.213724338 
+0000 UTC m=+151.235353607" Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.213953 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.214269 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.714255275 +0000 UTC m=+151.735884534 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.214281 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" event={"ID":"40102f05-30f8-4552-8db6-140eced8121a","Type":"ContainerStarted","Data":"dccde078f6e67f220fe24caa3297a6e1e90a5bbc761ae7dd535987608eaf91af"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.237121 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" event={"ID":"625e7064-21f1-491d-bbc2-1d2b2faaa977","Type":"ContainerStarted","Data":"6fba48162e3f0cc0c80161674b97f438a4b58eba3678e32faabedf0756b28091"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.246532 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g5n2c"] Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.267826 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-944sj" event={"ID":"b337cc76-0681-41e9-9cdb-7c660ee29b84","Type":"ContainerStarted","Data":"b1ce3777502739f36afac6a53007b7934dd15b9591d96e5d65d0e159ca3d141b"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.296440 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" event={"ID":"b123b9f1-7d6b-496c-87c2-7790b027abd6","Type":"ContainerStarted","Data":"97c93b96ebfda3c3d2c58a31109963e7a3b8bb0c85463d688df59d5b09bd54e8"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.297403 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.312280 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" event={"ID":"18dc429c-7515-47a0-b008-b60a631a2723","Type":"ContainerStarted","Data":"bdc6257404392e28178eee38ea30ce89a71aeb0bd245c7962affcca921dee314"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.364451 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.366976 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.86694404 +0000 UTC m=+151.888573299 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.371702 4935 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k4g4f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.376392 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" podUID="b123b9f1-7d6b-496c-87c2-7790b027abd6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.419274 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj"] Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.438470 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs"] Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.450913 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" event={"ID":"6ccc1894-d6b6-454c-ab68-45e1ffbc9124","Type":"ContainerStarted","Data":"d7e3a061f0973b7fd5579ce2330a547728bc7aa650efb9c7531571b1595a1d36"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.466832 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.467254 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:17.96724022 +0000 UTC m=+151.988869479 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: W1201 18:32:17.473617 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f035d02_e865_4fc5_a759_372db1d4e910.slice/crio-ab72a306398386fb03b9add4e675d8f6346bcd2c612231bc7bc31b660f02369b WatchSource:0}: Error finding container ab72a306398386fb03b9add4e675d8f6346bcd2c612231bc7bc31b660f02369b: Status 404 returned error can't find the container with id ab72a306398386fb03b9add4e675d8f6346bcd2c612231bc7bc31b660f02369b Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.480475 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-pphhc" event={"ID":"4c9a288d-97d2-4f8d-8e74-fb4913ee6627","Type":"ContainerStarted","Data":"602d1704c8162f53986731b2c77baa52973c68e3cc4a7e7ced2e6d0e1064078a"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.481489 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" podStartSLOduration=126.481477311 podStartE2EDuration="2m6.481477311s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:17.367782076 +0000 UTC m=+151.389411335" watchObservedRunningTime="2025-12-01 18:32:17.481477311 +0000 UTC m=+151.503106570" Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.484340 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-lsb2b"] Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.537756 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rrv4j"] Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.561238 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z"] Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.562299 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" event={"ID":"2145f358-3d68-4239-9267-cfe321b24ec3","Type":"ContainerStarted","Data":"805edfc92557641774e1080bc43eabd7d6de5fd6a75d88a4f9d959dbec192889"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.569496 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.570644 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-01 18:32:18.070603605 +0000 UTC m=+152.092232864 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.573706 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-sstkh"] Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.605090 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz"] Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.642646 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp" event={"ID":"6d5a0929-fc79-4de8-98b7-d5238c625373","Type":"ContainerStarted","Data":"a2b714d237919aedccd78e4ff5f192698bee91c5de22a9fb59eff464ea2c34b5"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.651426 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" event={"ID":"ddd2fe2c-4083-4793-81d8-20d7fc05fe3d","Type":"ContainerStarted","Data":"f6159cb70688dc4c469f850698d10af41826cd7382492fff4c40b22856738e4d"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.664061 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" event={"ID":"535dd9f1-99c2-430e-82ff-0a148a0331e7","Type":"ContainerStarted","Data":"2ef59c7d1f21d0e1b275a5e1f2063fa1604c87f57778e6e8d80ea91e073047c9"} Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.673960 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.674472 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.174457436 +0000 UTC m=+152.196086695 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.712572 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-jsbfp" podStartSLOduration=126.712538078 podStartE2EDuration="2m6.712538078s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:17.672603809 +0000 UTC m=+151.694233068" watchObservedRunningTime="2025-12-01 18:32:17.712538078 +0000 UTC m=+151.734167337" Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.715326 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-29f7l" podStartSLOduration=126.715311333 podStartE2EDuration="2m6.715311333s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:17.71039123 +0000 UTC m=+151.732020489" watchObservedRunningTime="2025-12-01 18:32:17.715311333 +0000 UTC m=+151.736940592" Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.751534 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65a97abc_c92c_4b07_8922_dace15327fb1.slice/crio-6e5b06f462f3132650fe56f46b3dab286515e4a4e880c2be939d05e1ac34e1c6.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65a97abc_c92c_4b07_8922_dace15327fb1.slice/crio-conmon-6e5b06f462f3132650fe56f46b3dab286515e4a4e880c2be939d05e1ac34e1c6.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.758059 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-2dvf9" podStartSLOduration=126.758036248 podStartE2EDuration="2m6.758036248s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:17.750498824 +0000 UTC m=+151.772128073" watchObservedRunningTime="2025-12-01 18:32:17.758036248 +0000 UTC m=+151.779665507" Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.778512 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.780951 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.280926788 +0000 UTC m=+152.302556037 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.882492 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.882956 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.382940012 +0000 UTC m=+152.404569271 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.984767 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.985345 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.485305476 +0000 UTC m=+152.506934735 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:17 crc kubenswrapper[4935]: I1201 18:32:17.985574 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:17 crc kubenswrapper[4935]: E1201 18:32:17.986167 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.486140352 +0000 UTC m=+152.507769611 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.086950 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.087196 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.587096283 +0000 UTC m=+152.608725532 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.089237 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.089876 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.589860729 +0000 UTC m=+152.611489988 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.128346 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:18 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:18 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:18 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.131905 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.192502 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.201695 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.701664926 +0000 UTC m=+152.723294195 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.303858 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.304256 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.804240037 +0000 UTC m=+152.825869296 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.406937 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.407871 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:18.90784842 +0000 UTC m=+152.929477679 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.508711 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.509063 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.009036008 +0000 UTC m=+153.030665267 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.609527 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.609951 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.109912417 +0000 UTC m=+153.131541676 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.610111 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.610581 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.110565567 +0000 UTC m=+153.132194826 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.719185 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.719539 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.219522066 +0000 UTC m=+153.241151325 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.738069 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" event={"ID":"94cfb221-c0cb-4979-bab8-ce0124fb0470","Type":"ContainerStarted","Data":"0b8bc2288213d4ce7054465a9702c157a9e4b5184250bf8da0d774816fe1198e"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.763456 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6gzcq" event={"ID":"e7437d18-a515-4690-8ccf-65b8540426a1","Type":"ContainerStarted","Data":"ca419e9ff133d0bf23c98745b782e2fdb7c5ddf578df01c935222233fae83265"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.798524 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-pphhc" event={"ID":"4c9a288d-97d2-4f8d-8e74-fb4913ee6627","Type":"ContainerStarted","Data":"afb9ce63fbfd2766bd9b4435f599c4af1ca4e66d771b87af511f518643820e26"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.800069 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-pphhc" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.807870 4935 patch_prober.go:28] interesting pod/downloads-7954f5f757-pphhc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.808222 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pphhc" podUID="4c9a288d-97d2-4f8d-8e74-fb4913ee6627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.808498 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pc9rt" podStartSLOduration=127.808470554 podStartE2EDuration="2m7.808470554s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:18.789730823 +0000 UTC m=+152.811360082" watchObservedRunningTime="2025-12-01 18:32:18.808470554 +0000 UTC m=+152.830099813" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.810228 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6gzcq" podStartSLOduration=7.810223349 podStartE2EDuration="7.810223349s" podCreationTimestamp="2025-12-01 18:32:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:18.808885217 +0000 UTC m=+152.830514476" watchObservedRunningTime="2025-12-01 18:32:18.810223349 +0000 UTC m=+152.831852598" Dec 01 
18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.824120 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.824497 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.324484462 +0000 UTC m=+153.346113721 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.842074 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" event={"ID":"9f035d02-e865-4fc5-a759-372db1d4e910","Type":"ContainerStarted","Data":"7bf4b014440664ce758bbe71c021bbb1282d6c5276df77ed25ed31f468b15e1b"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.842134 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" event={"ID":"9f035d02-e865-4fc5-a759-372db1d4e910","Type":"ContainerStarted","Data":"ab72a306398386fb03b9add4e675d8f6346bcd2c612231bc7bc31b660f02369b"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.842489 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.847354 4935 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-48czs container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.847423 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" podUID="9f035d02-e865-4fc5-a759-372db1d4e910" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.850279 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-pphhc" podStartSLOduration=127.85025679 podStartE2EDuration="2m7.85025679s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:18.848774615 +0000 UTC m=+152.870403874" watchObservedRunningTime="2025-12-01 18:32:18.85025679 +0000 UTC m=+152.871886049" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.865781 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" event={"ID":"d8bac674-5c61-4782-9f74-6374a430e7fc","Type":"ContainerStarted","Data":"1c23ef9b6e2724226042c04f6e05dbd2c565564ebae99b96f2638c912d1264fc"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.885995 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-sstkh" event={"ID":"adaacf02-1e93-42be-93cd-6b489b75e3a6","Type":"ContainerStarted","Data":"6673d194c4da60690af018b5dc9fa4538bbbfca71bd500cbb5f25bd2453d2db8"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.886523 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-sstkh" event={"ID":"adaacf02-1e93-42be-93cd-6b489b75e3a6","Type":"ContainerStarted","Data":"32cc341e03ebe99c81dc4d452567762dbed3e74c05c2be71f9cb8150f55217d7"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.893749 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" podStartSLOduration=127.893724149 podStartE2EDuration="2m7.893724149s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:18.890955193 +0000 UTC m=+152.912584452" watchObservedRunningTime="2025-12-01 18:32:18.893724149 +0000 UTC m=+152.915353408" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.922218 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" event={"ID":"38933f63-180c-4831-858b-2efc49cf634a","Type":"ContainerStarted","Data":"d7bf661b3b0754cca370189f3aa2b39148a26d847a18aa20389413e107536043"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.922567 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" event={"ID":"38933f63-180c-4831-858b-2efc49cf634a","Type":"ContainerStarted","Data":"e04b9ec36f05e4de70315df818bb13c03e402acbdfa1c6a083713c03f1fb830b"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.924781 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.924832 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.924919 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.424893305 +0000 UTC m=+153.446522564 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.937813 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.925882 4935 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-crm6z container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.938893 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" podUID="38933f63-180c-4831-858b-2efc49cf634a" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Dec 01 18:32:18 crc kubenswrapper[4935]: E1201 18:32:18.940682 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.440664754 +0000 UTC m=+153.462294213 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.951880 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-kzzxw" event={"ID":"71041089-3004-4e0b-990d-c6e471b38ea6","Type":"ContainerStarted","Data":"61ac228c054948aa8ccae20ba755288b401b11f046d78bfac5af71527ec7ffd2"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.952660 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.956727 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" event={"ID":"2b90e933-e7fe-4bfb-a2cc-47de5c67d631","Type":"ContainerStarted","Data":"d1b518529cba6fc8cb66aa25c0a7012384c1df5577863ff3bce563abc60aac7b"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.969646 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-sstkh" podStartSLOduration=7.969621322 podStartE2EDuration="7.969621322s" podCreationTimestamp="2025-12-01 18:32:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:18.969061715 +0000 UTC m=+152.990690974" watchObservedRunningTime="2025-12-01 18:32:18.969621322 +0000 UTC m=+152.991250571" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.969982 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" podStartSLOduration=127.969977323 podStartE2EDuration="2m7.969977323s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:18.920382625 +0000 UTC m=+152.942011884" watchObservedRunningTime="2025-12-01 18:32:18.969977323 +0000 UTC m=+152.991606582" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.982566 4935 generic.go:334] "Generic (PLEG): container finished" podID="6ccc1894-d6b6-454c-ab68-45e1ffbc9124" containerID="d7e3a061f0973b7fd5579ce2330a547728bc7aa650efb9c7531571b1595a1d36" exitCode=0 Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.982643 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" event={"ID":"6ccc1894-d6b6-454c-ab68-45e1ffbc9124","Type":"ContainerDied","Data":"d7e3a061f0973b7fd5579ce2330a547728bc7aa650efb9c7531571b1595a1d36"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.982674 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" event={"ID":"6ccc1894-d6b6-454c-ab68-45e1ffbc9124","Type":"ContainerStarted","Data":"4bc4ea71c3901bf6252d271b9c9840d18a6c695ba2aa73bb0b94c7cf53231b3e"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.983216 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.984609 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" event={"ID":"18dc429c-7515-47a0-b008-b60a631a2723","Type":"ContainerStarted","Data":"3a3a5bd28f4d6f7e61dda0a61d6a5b12300c066989427a8caa5564fefbdb0560"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.984632 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" event={"ID":"18dc429c-7515-47a0-b008-b60a631a2723","Type":"ContainerStarted","Data":"04fa03fc1e28fc54305a999d8512aa52ed3f9beace8ea78d8c972beeba5298df"} Dec 01 18:32:18 crc kubenswrapper[4935]: I1201 18:32:18.990182 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-kzzxw" podStartSLOduration=7.990137278 podStartE2EDuration="7.990137278s" podCreationTimestamp="2025-12-01 18:32:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:18.985589417 +0000 UTC m=+153.007218676" watchObservedRunningTime="2025-12-01 18:32:18.990137278 +0000 UTC m=+153.011766537" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.003851 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" event={"ID":"335915be-d5ad-4beb-929c-b41d0b7c4601","Type":"ContainerStarted","Data":"ecd54831c245aedcfce99e99d09c5385e33a76cd2d8c3498de238cd4dffa2c16"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.003927 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" event={"ID":"335915be-d5ad-4beb-929c-b41d0b7c4601","Type":"ContainerStarted","Data":"55fef235e73df5b27512f4e610b5caf22680d89d5c44e228ba8c2137b0241259"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.020404 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" event={"ID":"72ca0a59-24a4-459e-a0d3-891d6ac90ed7","Type":"ContainerStarted","Data":"d34332de06020c9cead2186b268813ac3289b58bf55ea8f04ceebdceaf5e0f6f"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.020461 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" event={"ID":"72ca0a59-24a4-459e-a0d3-891d6ac90ed7","Type":"ContainerStarted","Data":"119f248aa54d16978f5b8a4ee34e40f410c1942c7c05c2ccecfe34f0cd9559c5"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.021840 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" podStartSLOduration=128.021827951 podStartE2EDuration="2m8.021827951s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.021248723 +0000 UTC m=+153.042877972" watchObservedRunningTime="2025-12-01 18:32:19.021827951 +0000 UTC m=+153.043457210" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.022546 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" 
event={"ID":"4c6a6c13-28a0-446d-8446-130794e1ee21","Type":"ContainerStarted","Data":"26b6a05d933d21c880b21be2edabe0bc9e11638652d1b3642a06736ee20a6775"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.023958 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" event={"ID":"40102f05-30f8-4552-8db6-140eced8121a","Type":"ContainerStarted","Data":"cb0b3f11953f3a5f20f84fe3d3f7c49c2e7764a1edff888bd8712f0f8e755ccc"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.032824 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" event={"ID":"032f1651-2326-461b-897e-35303c17f32c","Type":"ContainerStarted","Data":"902a27d8003bf6aa41a4f96831217b892b874fd89393b7d8073bd98d723cb4f4"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.034777 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" event={"ID":"4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a","Type":"ContainerStarted","Data":"3b1ab76816865ff2a0e9239dc61dad93b63073fcd923c6802bd43b70c8127d27"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.034888 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" event={"ID":"4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a","Type":"ContainerStarted","Data":"6333393dde15d347c9bd7fdbc0bdd5bf841231956a335c9c39cfc23d25e625af"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.035346 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.036251 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" event={"ID":"ca0af18b-162c-47f2-aa52-6bcd54a87a80","Type":"ContainerStarted","Data":"1718faa819ca922b42a2ddc75cd64eaf5ebb87ce0c29eb456c43ab71ef647b59"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.036335 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" event={"ID":"ca0af18b-162c-47f2-aa52-6bcd54a87a80","Type":"ContainerStarted","Data":"5cc17cef5175564745f83eb531fc4905d152525562fc6566ccb2e47a56aa527c"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.037701 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" event={"ID":"2145f358-3d68-4239-9267-cfe321b24ec3","Type":"ContainerStarted","Data":"4cfb011fb81016e8760f938a8c19c41bf38ff4db145022e36f705c5d38bcc1c8"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.042959 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.043262 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-01 18:32:19.543235285 +0000 UTC m=+153.564864544 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.043657 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.047540 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.547529248 +0000 UTC m=+153.569158507 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.050635 4935 generic.go:334] "Generic (PLEG): container finished" podID="65a97abc-c92c-4b07-8922-dace15327fb1" containerID="6e5b06f462f3132650fe56f46b3dab286515e4a4e880c2be939d05e1ac34e1c6" exitCode=0 Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.050795 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" event={"ID":"65a97abc-c92c-4b07-8922-dace15327fb1","Type":"ContainerDied","Data":"6e5b06f462f3132650fe56f46b3dab286515e4a4e880c2be939d05e1ac34e1c6"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.068546 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-944sj" event={"ID":"b337cc76-0681-41e9-9cdb-7c660ee29b84","Type":"ContainerStarted","Data":"516f9eb08c41802aaa85a1d46c5d9af54b88d26e889074726d8163011485ce0c"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.070108 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.072140 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-7kn22" podStartSLOduration=128.07210305 podStartE2EDuration="2m8.07210305s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.0675616 +0000 UTC m=+153.089190869" watchObservedRunningTime="2025-12-01 18:32:19.07210305 +0000 UTC m=+153.093732309" Dec 01 18:32:19 crc 
kubenswrapper[4935]: I1201 18:32:19.076793 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" event={"ID":"5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22","Type":"ContainerStarted","Data":"7968b55167dc3813c2df64dc9e8a980a05722ef128817914ee7eb73f7ba385ef"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.076850 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" event={"ID":"5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22","Type":"ContainerStarted","Data":"cd6a7424010f468afed6f5dfbbb37e17b1d98c123af721a6832c21338dd2ce1d"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.079728 4935 patch_prober.go:28] interesting pod/console-operator-58897d9998-944sj container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/readyz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.079785 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-944sj" podUID="b337cc76-0681-41e9-9cdb-7c660ee29b84" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.25:8443/readyz\": dial tcp 10.217.0.25:8443: connect: connection refused" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.088335 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" event={"ID":"f502d82d-fe0c-40c0-aeba-1b50934fd13a","Type":"ContainerStarted","Data":"5c8b4cb6a6d13dd0375b1902941a08c9ccc8855b24398b263e18ec910f48454e"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.101536 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" event={"ID":"efebdc19-5ace-480b-8151-51e2ea78b4e8","Type":"ContainerStarted","Data":"008b2503c1bf46d86dd1492d5bb82e228bcf3871efde790e72652002e1589335"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.101589 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" event={"ID":"efebdc19-5ace-480b-8151-51e2ea78b4e8","Type":"ContainerStarted","Data":"542d07db7cae13ba3685a4383cce471da3b85d8057c95ab3539d4d47e10aa738"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.128902 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x4vnj" podStartSLOduration=128.128878461 podStartE2EDuration="2m8.128878461s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.092698089 +0000 UTC m=+153.114327348" watchObservedRunningTime="2025-12-01 18:32:19.128878461 +0000 UTC m=+153.150507720" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.136347 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:19 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:19 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:19 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:19 crc kubenswrapper[4935]: 
I1201 18:32:19.136865 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.145930 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.147390 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" event={"ID":"625e7064-21f1-491d-bbc2-1d2b2faaa977","Type":"ContainerStarted","Data":"9daa5bb1d6371c561c8a9e3268a42c52709bcef5e95cfbea714027b026bc49c6"} Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.147452 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.154372 4935 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k4g4f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.154454 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" podUID="b123b9f1-7d6b-496c-87c2-7790b027abd6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.159405 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.659375337 +0000 UTC m=+153.681004596 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.191909 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-g5n2c" podStartSLOduration=128.191886895 podStartE2EDuration="2m8.191886895s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.189622935 +0000 UTC m=+153.211252194" watchObservedRunningTime="2025-12-01 18:32:19.191886895 +0000 UTC m=+153.213516154" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.249515 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.270259 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5wx92" podStartSLOduration=128.270231114 podStartE2EDuration="2m8.270231114s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.243569468 +0000 UTC m=+153.265198747" watchObservedRunningTime="2025-12-01 18:32:19.270231114 +0000 UTC m=+153.291860373" Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.276469 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.776445917 +0000 UTC m=+153.798075176 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.289963 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zfbpg" podStartSLOduration=128.289926825 podStartE2EDuration="2m8.289926825s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.272963289 +0000 UTC m=+153.294592568" watchObservedRunningTime="2025-12-01 18:32:19.289926825 +0000 UTC m=+153.311556094" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.318474 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w9jp2" podStartSLOduration=128.31845514 podStartE2EDuration="2m8.31845514s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.317655845 +0000 UTC m=+153.339285114" watchObservedRunningTime="2025-12-01 18:32:19.31845514 +0000 UTC m=+153.340084399" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.351370 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.352051 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.852027851 +0000 UTC m=+153.873657110 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.364766 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" podStartSLOduration=128.364742026 podStartE2EDuration="2m8.364742026s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.358526773 +0000 UTC m=+153.380156032" watchObservedRunningTime="2025-12-01 18:32:19.364742026 +0000 UTC m=+153.386371285" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.392411 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" podStartSLOduration=128.392392523 podStartE2EDuration="2m8.392392523s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.391252098 +0000 UTC m=+153.412881367" watchObservedRunningTime="2025-12-01 18:32:19.392392523 +0000 UTC m=+153.414021782" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.429109 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wvsgh" podStartSLOduration=128.429089571 podStartE2EDuration="2m8.429089571s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.423703364 +0000 UTC m=+153.445332623" watchObservedRunningTime="2025-12-01 18:32:19.429089571 +0000 UTC m=+153.450718830" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.455349 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.455351 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" podStartSLOduration=128.455318044 podStartE2EDuration="2m8.455318044s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.454476249 +0000 UTC m=+153.476105508" watchObservedRunningTime="2025-12-01 18:32:19.455318044 +0000 UTC m=+153.476947303" Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.455772 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:19.955760218 +0000 UTC m=+153.977389477 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.557945 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.558838 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.058821184 +0000 UTC m=+154.080450433 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.559710 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" podStartSLOduration=128.559684131 podStartE2EDuration="2m8.559684131s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.550775085 +0000 UTC m=+153.572404364" watchObservedRunningTime="2025-12-01 18:32:19.559684131 +0000 UTC m=+153.581313390" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.602818 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-944sj" podStartSLOduration=128.602798279 podStartE2EDuration="2m8.602798279s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.601163057 +0000 UTC m=+153.622792316" watchObservedRunningTime="2025-12-01 18:32:19.602798279 +0000 UTC m=+153.624427538" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.638898 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9st46" podStartSLOduration=128.638874997 podStartE2EDuration="2m8.638874997s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.63540633 +0000 UTC m=+153.657035589" 
watchObservedRunningTime="2025-12-01 18:32:19.638874997 +0000 UTC m=+153.660504256" Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.664043 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.664519 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.164502701 +0000 UTC m=+154.186131960 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.765416 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.765561 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.265537395 +0000 UTC m=+154.287166654 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.765738 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.766316 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.266298948 +0000 UTC m=+154.287928207 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.866742 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.866907 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.366872427 +0000 UTC m=+154.388501696 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.867418 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.867777 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.367765145 +0000 UTC m=+154.389394404 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.969510 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.969731 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.469692136 +0000 UTC m=+154.491321395 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:19 crc kubenswrapper[4935]: I1201 18:32:19.970091 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:19 crc kubenswrapper[4935]: E1201 18:32:19.970657 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.470647006 +0000 UTC m=+154.492276265 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.071715 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.071912 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.571883795 +0000 UTC m=+154.593513054 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.072099 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.072571 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.572554926 +0000 UTC m=+154.594184185 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.121843 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:20 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:20 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:20 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.121933 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.147454 4935 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-ccph8 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.147548 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" podUID="625e7064-21f1-491d-bbc2-1d2b2faaa977" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.157205 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" event={"ID":"4d25a5e1-9a40-4668-9f5c-fa559b6f2c0a","Type":"ContainerStarted","Data":"e93dccfc6bb1a738695c33d74ca7502eac218cf183eea8db5af651e17a06914b"} Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.160558 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" event={"ID":"4c6a6c13-28a0-446d-8446-130794e1ee21","Type":"ContainerStarted","Data":"62b0b3415fce10d8863457d3a77c36dc068360be7a03d71b6e3bfc813551aa07"} Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.173389 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.173896 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.673859378 +0000 UTC m=+154.695488637 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.178228 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" event={"ID":"65a97abc-c92c-4b07-8922-dace15327fb1","Type":"ContainerStarted","Data":"543758f6d1db8ae4137df4c22d33cba6be014ba569736b6418c49ab7db72f097"} Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.182322 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-kzzxw" event={"ID":"71041089-3004-4e0b-990d-c6e471b38ea6","Type":"ContainerStarted","Data":"738d58a800960c59b15c3ddde7ca068364f6997e3bbc8626f28485e95dde1709"} Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.189108 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" event={"ID":"5c2ea1a3-e92a-43cf-99f4-17a8f3d7da22","Type":"ContainerStarted","Data":"237f00b058595bbde2642b2ce4746ce2e510c544f0d91fa6b075cd9df4e7faee"} Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.191774 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-5tzwh" event={"ID":"032f1651-2326-461b-897e-35303c17f32c","Type":"ContainerStarted","Data":"78e925b265432948a5caf6925be2259e8e1b350ec56e33c948b9dc802e0793af"} Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.194732 4935 patch_prober.go:28] interesting pod/downloads-7954f5f757-pphhc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.194786 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pphhc" podUID="4c9a288d-97d2-4f8d-8e74-fb4913ee6627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.194825 4935 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k4g4f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.194850 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" podUID="b123b9f1-7d6b-496c-87c2-7790b027abd6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.198637 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-48czs" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.206042 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-crm6z" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.222749 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" podStartSLOduration=129.222727993 podStartE2EDuration="2m9.222727993s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:19.716703481 +0000 UTC m=+153.738332750" watchObservedRunningTime="2025-12-01 18:32:20.222727993 +0000 UTC m=+154.244357252" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.223422 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" podStartSLOduration=129.223415485 podStartE2EDuration="2m9.223415485s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:20.221810655 +0000 UTC m=+154.243439914" watchObservedRunningTime="2025-12-01 18:32:20.223415485 +0000 UTC m=+154.245044754" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.278348 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.281061 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.781040052 +0000 UTC m=+154.802669311 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.297413 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ccph8" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.322035 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-lsb2b" podStartSLOduration=129.322010343 podStartE2EDuration="2m9.322010343s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:20.321650021 +0000 UTC m=+154.343279280" watchObservedRunningTime="2025-12-01 18:32:20.322010343 +0000 UTC m=+154.343639602" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.381777 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.383299 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.883283173 +0000 UTC m=+154.904912432 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.484362 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.485141 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:20.985127601 +0000 UTC m=+155.006756860 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.586369 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.586812 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.086791644 +0000 UTC m=+155.108420903 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.688160 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.688569 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.18855598 +0000 UTC m=+155.210185239 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.788937 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.789532 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.289508891 +0000 UTC m=+155.311138150 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.891348 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.891907 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.391881465 +0000 UTC m=+155.413510724 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.931673 4935 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.957255 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-944sj" Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.992921 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.993180 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.493124935 +0000 UTC m=+155.514754194 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:20 crc kubenswrapper[4935]: I1201 18:32:20.993275 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:20 crc kubenswrapper[4935]: E1201 18:32:20.993658 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.493645611 +0000 UTC m=+155.515274870 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.095176 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:21 crc kubenswrapper[4935]: E1201 18:32:21.095418 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.595370225 +0000 UTC m=+155.616999474 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.096065 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:21 crc kubenswrapper[4935]: E1201 18:32:21.096513 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.596493291 +0000 UTC m=+155.618122550 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.120522 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:21 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:21 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:21 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.120584 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.197288 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:21 crc kubenswrapper[4935]: E1201 18:32:21.197689 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.697672428 +0000 UTC m=+155.719301687 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.201448 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" event={"ID":"4c6a6c13-28a0-446d-8446-130794e1ee21","Type":"ContainerStarted","Data":"0dbe70117f834c570e2d4472939ad743b7e660512afa60ef82c8f61dd21f1e8c"} Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.201491 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" event={"ID":"4c6a6c13-28a0-446d-8446-130794e1ee21","Type":"ContainerStarted","Data":"e597add7c40e5c21747d3eb927a1b1bb2088fba5f0be687c155076d0262a4aef"} Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.204352 4935 patch_prober.go:28] interesting pod/downloads-7954f5f757-pphhc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.204406 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pphhc" podUID="4c9a288d-97d2-4f8d-8e74-fb4913ee6627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.255588 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-px5jz" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.301476 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:21 crc kubenswrapper[4935]: E1201 18:32:21.301942 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.801921032 +0000 UTC m=+155.823550291 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.404003 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:21 crc kubenswrapper[4935]: E1201 18:32:21.404170 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.904127931 +0000 UTC m=+155.925757190 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.404309 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:21 crc kubenswrapper[4935]: E1201 18:32:21.404659 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:21.904650727 +0000 UTC m=+155.926279986 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.504946 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:21 crc kubenswrapper[4935]: E1201 18:32:21.505503 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:22.005484364 +0000 UTC m=+156.027113643 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.607013 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:21 crc kubenswrapper[4935]: E1201 18:32:21.607381 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 18:32:22.107368624 +0000 UTC m=+156.128997883 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89k72" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.702885 4935 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-01T18:32:20.931713371Z","Handler":null,"Name":""} Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.707755 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:21 crc kubenswrapper[4935]: E1201 18:32:21.708682 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 18:32:22.208666175 +0000 UTC m=+156.230295434 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.708821 4935 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.708848 4935 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.809984 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.814394 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zpnpj"] Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.815916 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.823734 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.828381 4935 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.828656 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.838911 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zpnpj"] Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.875923 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89k72\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.911917 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.912179 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfcrp\" (UniqueName: \"kubernetes.io/projected/0efae526-1f2b-44b5-b69e-64af2f426aa8-kube-api-access-sfcrp\") pod \"certified-operators-zpnpj\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.912212 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-catalog-content\") pod \"certified-operators-zpnpj\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.912242 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-utilities\") pod \"certified-operators-zpnpj\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.924800 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 01 18:32:21 crc kubenswrapper[4935]: I1201 18:32:21.979478 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.013735 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-catalog-content\") pod \"certified-operators-zpnpj\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.013828 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-utilities\") pod \"certified-operators-zpnpj\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.013946 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfcrp\" (UniqueName: \"kubernetes.io/projected/0efae526-1f2b-44b5-b69e-64af2f426aa8-kube-api-access-sfcrp\") pod \"certified-operators-zpnpj\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.015267 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gkmsh"] Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.015508 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-catalog-content\") pod \"certified-operators-zpnpj\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.015801 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-utilities\") pod \"certified-operators-zpnpj\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.026039 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.029249 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.031122 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gkmsh"] Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.049030 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfcrp\" (UniqueName: \"kubernetes.io/projected/0efae526-1f2b-44b5-b69e-64af2f426aa8-kube-api-access-sfcrp\") pod \"certified-operators-zpnpj\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.119971 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:22 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:22 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:22 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.120025 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.135615 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.218289 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rmnk\" (UniqueName: \"kubernetes.io/projected/66550924-3006-4d90-b516-ac5ea6155bbc-kube-api-access-2rmnk\") pod \"community-operators-gkmsh\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.218660 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-utilities\") pod \"community-operators-gkmsh\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.218733 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-catalog-content\") pod \"community-operators-gkmsh\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.228043 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gsqq5"] Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.230422 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" event={"ID":"4c6a6c13-28a0-446d-8446-130794e1ee21","Type":"ContainerStarted","Data":"55b43d66ad70d005177468ec4471f3b1efbeb0cdef01cd717996ebf2f13d65f7"} Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.231360 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.247026 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gsqq5"] Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.278915 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-rrv4j" podStartSLOduration=11.278860068 podStartE2EDuration="11.278860068s" podCreationTimestamp="2025-12-01 18:32:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:22.268365302 +0000 UTC m=+156.289994591" watchObservedRunningTime="2025-12-01 18:32:22.278860068 +0000 UTC m=+156.300489327" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.320123 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rmnk\" (UniqueName: \"kubernetes.io/projected/66550924-3006-4d90-b516-ac5ea6155bbc-kube-api-access-2rmnk\") pod \"community-operators-gkmsh\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.320203 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-utilities\") pod \"community-operators-gkmsh\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.320261 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-catalog-content\") pod \"community-operators-gkmsh\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.321285 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-catalog-content\") pod \"community-operators-gkmsh\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.322284 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-utilities\") pod \"community-operators-gkmsh\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.349733 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rmnk\" (UniqueName: \"kubernetes.io/projected/66550924-3006-4d90-b516-ac5ea6155bbc-kube-api-access-2rmnk\") pod \"community-operators-gkmsh\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.370014 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89k72"] Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.405291 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.405312 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mvlbn"] Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.410563 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.414920 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mvlbn"] Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.421018 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd7zv\" (UniqueName: \"kubernetes.io/projected/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-kube-api-access-cd7zv\") pod \"certified-operators-gsqq5\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.421118 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-catalog-content\") pod \"certified-operators-gsqq5\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.421166 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-utilities\") pod \"certified-operators-gsqq5\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.462380 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.462423 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.466836 4935 patch_prober.go:28] interesting pod/console-f9d7485db-dbvg7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.466884 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dbvg7" podUID="6881ae5d-31b3-4749-bd1a-db65599d48d3" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.518261 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.524074 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-catalog-content\") pod \"certified-operators-gsqq5\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " 
pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.524135 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-utilities\") pod \"certified-operators-gsqq5\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.524200 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-catalog-content\") pod \"community-operators-mvlbn\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.524238 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-utilities\") pod \"community-operators-mvlbn\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.524291 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd7zv\" (UniqueName: \"kubernetes.io/projected/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-kube-api-access-cd7zv\") pod \"certified-operators-gsqq5\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.524331 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5cdc\" (UniqueName: \"kubernetes.io/projected/558a5c77-7d48-4ebf-af67-d42e717939d5-kube-api-access-h5cdc\") pod \"community-operators-mvlbn\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.525573 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-catalog-content\") pod \"certified-operators-gsqq5\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.525779 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-utilities\") pod \"certified-operators-gsqq5\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.546967 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd7zv\" (UniqueName: \"kubernetes.io/projected/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-kube-api-access-cd7zv\") pod \"certified-operators-gsqq5\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.585915 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zpnpj"] Dec 01 18:32:22 crc kubenswrapper[4935]: W1201 18:32:22.590196 4935 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0efae526_1f2b_44b5_b69e_64af2f426aa8.slice/crio-d8433b439ce6c27ff74def9ea4ef1127281f253ec3cec4c9bf625e2a7e0ff87e WatchSource:0}: Error finding container d8433b439ce6c27ff74def9ea4ef1127281f253ec3cec4c9bf625e2a7e0ff87e: Status 404 returned error can't find the container with id d8433b439ce6c27ff74def9ea4ef1127281f253ec3cec4c9bf625e2a7e0ff87e Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.593126 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.593313 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.606724 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.615540 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.627190 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-catalog-content\") pod \"community-operators-mvlbn\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.627284 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-utilities\") pod \"community-operators-mvlbn\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.627382 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5cdc\" (UniqueName: \"kubernetes.io/projected/558a5c77-7d48-4ebf-af67-d42e717939d5-kube-api-access-h5cdc\") pod \"community-operators-mvlbn\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.630985 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-catalog-content\") pod \"community-operators-mvlbn\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.631522 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-utilities\") pod \"community-operators-mvlbn\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.666529 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5cdc\" (UniqueName: \"kubernetes.io/projected/558a5c77-7d48-4ebf-af67-d42e717939d5-kube-api-access-h5cdc\") pod \"community-operators-mvlbn\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " pod="openshift-marketplace/community-operators-mvlbn" Dec 01 
18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.737502 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:32:22 crc kubenswrapper[4935]: I1201 18:32:22.756027 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gkmsh"] Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.035820 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gsqq5"] Dec 01 18:32:23 crc kubenswrapper[4935]: W1201 18:32:23.074463 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf5e8422_11c7_41d2_9c3f_4ed3aa3cc10a.slice/crio-9f7ba5bc3f73af729e83d470204f9609cadac064295309a42ecec5564c7c109c WatchSource:0}: Error finding container 9f7ba5bc3f73af729e83d470204f9609cadac064295309a42ecec5564c7c109c: Status 404 returned error can't find the container with id 9f7ba5bc3f73af729e83d470204f9609cadac064295309a42ecec5564c7c109c Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.120770 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:23 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:23 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:23 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.120833 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.217573 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mvlbn"] Dec 01 18:32:23 crc kubenswrapper[4935]: W1201 18:32:23.223916 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod558a5c77_7d48_4ebf_af67_d42e717939d5.slice/crio-e2c7aad749a73c86604fff2c0bd5ff6e6d45fde62effc10482a4442fea1547d3 WatchSource:0}: Error finding container e2c7aad749a73c86604fff2c0bd5ff6e6d45fde62effc10482a4442fea1547d3: Status 404 returned error can't find the container with id e2c7aad749a73c86604fff2c0bd5ff6e6d45fde62effc10482a4442fea1547d3 Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.238018 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" event={"ID":"4f81095d-3084-427f-8f0e-bdd180180c31","Type":"ContainerStarted","Data":"758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277"} Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.238100 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" event={"ID":"4f81095d-3084-427f-8f0e-bdd180180c31","Type":"ContainerStarted","Data":"26004290f015a1ed5f5dc6aeaff71951ccb4753bb0c2bc9127436eac375148b5"} Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.238250 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.241522 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsqq5" event={"ID":"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a","Type":"ContainerStarted","Data":"9f7ba5bc3f73af729e83d470204f9609cadac064295309a42ecec5564c7c109c"} Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.243413 4935 generic.go:334] "Generic (PLEG): container finished" podID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerID="267b437c3bb6d2605400e1fd365ca449566fd021beb23765d7ed8a919643909f" exitCode=0 Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.243494 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpnpj" event={"ID":"0efae526-1f2b-44b5-b69e-64af2f426aa8","Type":"ContainerDied","Data":"267b437c3bb6d2605400e1fd365ca449566fd021beb23765d7ed8a919643909f"} Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.243541 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpnpj" event={"ID":"0efae526-1f2b-44b5-b69e-64af2f426aa8","Type":"ContainerStarted","Data":"d8433b439ce6c27ff74def9ea4ef1127281f253ec3cec4c9bf625e2a7e0ff87e"} Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.245465 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvlbn" event={"ID":"558a5c77-7d48-4ebf-af67-d42e717939d5","Type":"ContainerStarted","Data":"e2c7aad749a73c86604fff2c0bd5ff6e6d45fde62effc10482a4442fea1547d3"} Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.247428 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.247936 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gkmsh" event={"ID":"66550924-3006-4d90-b516-ac5ea6155bbc","Type":"ContainerStarted","Data":"5a5da0878716880a651408d57c3d1d135a50c2e87be893bcc038dc2790602dcb"} Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.247985 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gkmsh" event={"ID":"66550924-3006-4d90-b516-ac5ea6155bbc","Type":"ContainerStarted","Data":"2194d6e3df49685218f8d8d68861998f11d9a1e45de8634c3c04b6e4dcbe839f"} Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.255492 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-j75h2" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.265288 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" podStartSLOduration=132.265268388 podStartE2EDuration="2m12.265268388s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:23.262839493 +0000 UTC m=+157.284468752" watchObservedRunningTime="2025-12-01 18:32:23.265268388 +0000 UTC m=+157.286897647" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.288059 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.290451 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.296690 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.296778 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.303496 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.455126 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad06da62-67e0-4ee7-930b-e91126f164cc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ad06da62-67e0-4ee7-930b-e91126f164cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.455742 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad06da62-67e0-4ee7-930b-e91126f164cc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ad06da62-67e0-4ee7-930b-e91126f164cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.557556 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad06da62-67e0-4ee7-930b-e91126f164cc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ad06da62-67e0-4ee7-930b-e91126f164cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.557655 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad06da62-67e0-4ee7-930b-e91126f164cc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ad06da62-67e0-4ee7-930b-e91126f164cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.557727 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad06da62-67e0-4ee7-930b-e91126f164cc-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ad06da62-67e0-4ee7-930b-e91126f164cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.577263 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad06da62-67e0-4ee7-930b-e91126f164cc-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ad06da62-67e0-4ee7-930b-e91126f164cc\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.627486 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.792524 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.792580 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.813725 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rtvzn"] Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.827410 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.827564 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.830323 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.864795 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rtvzn"] Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.971258 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-catalog-content\") pod \"redhat-marketplace-rtvzn\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.971361 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-utilities\") pod \"redhat-marketplace-rtvzn\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:23 crc kubenswrapper[4935]: I1201 18:32:23.971456 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxqjg\" (UniqueName: \"kubernetes.io/projected/585dfe75-4262-4b8b-9874-25e51b01cafd-kube-api-access-cxqjg\") pod \"redhat-marketplace-rtvzn\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.005569 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 18:32:24 crc kubenswrapper[4935]: W1201 18:32:24.015745 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podad06da62_67e0_4ee7_930b_e91126f164cc.slice/crio-1bcbbd5e99ff56efe151cf3b1b52663de813cb6d92a7923b2d63871ba795005e WatchSource:0}: Error finding container 1bcbbd5e99ff56efe151cf3b1b52663de813cb6d92a7923b2d63871ba795005e: Status 404 returned error can't find the container with id 1bcbbd5e99ff56efe151cf3b1b52663de813cb6d92a7923b2d63871ba795005e Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.072667 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-catalog-content\") pod \"redhat-marketplace-rtvzn\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.072746 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-utilities\") pod \"redhat-marketplace-rtvzn\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.072789 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxqjg\" (UniqueName: \"kubernetes.io/projected/585dfe75-4262-4b8b-9874-25e51b01cafd-kube-api-access-cxqjg\") pod \"redhat-marketplace-rtvzn\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.073841 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-catalog-content\") pod \"redhat-marketplace-rtvzn\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.073842 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-utilities\") pod \"redhat-marketplace-rtvzn\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.094672 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxqjg\" (UniqueName: \"kubernetes.io/projected/585dfe75-4262-4b8b-9874-25e51b01cafd-kube-api-access-cxqjg\") pod \"redhat-marketplace-rtvzn\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.116913 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.120484 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:24 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:24 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:24 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.120543 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.206914 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.210571 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hxwbt"] Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.211956 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.227573 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxwbt"] Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.263049 4935 generic.go:334] "Generic (PLEG): container finished" podID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerID="829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f" exitCode=0 Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.263390 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvlbn" event={"ID":"558a5c77-7d48-4ebf-af67-d42e717939d5","Type":"ContainerDied","Data":"829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f"} Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.270608 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ad06da62-67e0-4ee7-930b-e91126f164cc","Type":"ContainerStarted","Data":"1bcbbd5e99ff56efe151cf3b1b52663de813cb6d92a7923b2d63871ba795005e"} Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.276429 4935 generic.go:334] "Generic (PLEG): container finished" podID="66550924-3006-4d90-b516-ac5ea6155bbc" containerID="5a5da0878716880a651408d57c3d1d135a50c2e87be893bcc038dc2790602dcb" exitCode=0 Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.276631 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gkmsh" event={"ID":"66550924-3006-4d90-b516-ac5ea6155bbc","Type":"ContainerDied","Data":"5a5da0878716880a651408d57c3d1d135a50c2e87be893bcc038dc2790602dcb"} Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.284692 4935 generic.go:334] "Generic (PLEG): container finished" podID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerID="099e4680cab07777b7f828dda3b53d8dbbcdd1cfb72c998b24d54ffb79345c93" exitCode=0 Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.285259 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsqq5" event={"ID":"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a","Type":"ContainerDied","Data":"099e4680cab07777b7f828dda3b53d8dbbcdd1cfb72c998b24d54ffb79345c93"} Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.295099 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.295448 4935 generic.go:334] "Generic (PLEG): container finished" podID="d8bac674-5c61-4782-9f74-6374a430e7fc" containerID="1c23ef9b6e2724226042c04f6e05dbd2c565564ebae99b96f2638c912d1264fc" exitCode=0 Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.295648 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" event={"ID":"d8bac674-5c61-4782-9f74-6374a430e7fc","Type":"ContainerDied","Data":"1c23ef9b6e2724226042c04f6e05dbd2c565564ebae99b96f2638c912d1264fc"} Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.304931 4935 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tnkbz" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.346231 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.346291 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.380015 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgssv\" (UniqueName: \"kubernetes.io/projected/b23712c2-6e9e-46eb-a875-8db435d4eabc-kube-api-access-hgssv\") pod \"redhat-marketplace-hxwbt\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.380071 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-catalog-content\") pod \"redhat-marketplace-hxwbt\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.380115 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-utilities\") pod \"redhat-marketplace-hxwbt\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.483966 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgssv\" (UniqueName: \"kubernetes.io/projected/b23712c2-6e9e-46eb-a875-8db435d4eabc-kube-api-access-hgssv\") pod \"redhat-marketplace-hxwbt\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.484116 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-catalog-content\") pod \"redhat-marketplace-hxwbt\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.484261 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-utilities\") pod \"redhat-marketplace-hxwbt\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.487564 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-utilities\") pod 
\"redhat-marketplace-hxwbt\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.488295 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-catalog-content\") pod \"redhat-marketplace-hxwbt\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.500847 4935 patch_prober.go:28] interesting pod/downloads-7954f5f757-pphhc container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.500903 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-pphhc" podUID="4c9a288d-97d2-4f8d-8e74-fb4913ee6627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.503033 4935 patch_prober.go:28] interesting pod/downloads-7954f5f757-pphhc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.503068 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pphhc" podUID="4c9a288d-97d2-4f8d-8e74-fb4913ee6627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.42:8080/\": dial tcp 10.217.0.42:8080: connect: connection refused" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.511032 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgssv\" (UniqueName: \"kubernetes.io/projected/b23712c2-6e9e-46eb-a875-8db435d4eabc-kube-api-access-hgssv\") pod \"redhat-marketplace-hxwbt\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.538317 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rtvzn"] Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.563555 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.636922 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:32:24 crc kubenswrapper[4935]: I1201 18:32:24.854575 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxwbt"] Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.023883 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-67krg"] Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.028009 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.031999 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.047482 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-67krg"] Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.105539 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-utilities\") pod \"redhat-operators-67krg\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.105601 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pf2x\" (UniqueName: \"kubernetes.io/projected/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-kube-api-access-5pf2x\") pod \"redhat-operators-67krg\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.105642 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-catalog-content\") pod \"redhat-operators-67krg\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.122855 4935 patch_prober.go:28] interesting pod/router-default-5444994796-p95fx container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 18:32:25 crc kubenswrapper[4935]: [-]has-synced failed: reason withheld Dec 01 18:32:25 crc kubenswrapper[4935]: [+]process-running ok Dec 01 18:32:25 crc kubenswrapper[4935]: healthz check failed Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.122924 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p95fx" podUID="c0465c3c-119c-4e8a-8bb7-697b2690b1bf" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.211433 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-utilities\") pod \"redhat-operators-67krg\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.211537 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pf2x\" (UniqueName: \"kubernetes.io/projected/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-kube-api-access-5pf2x\") pod \"redhat-operators-67krg\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.211588 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-catalog-content\") pod 
\"redhat-operators-67krg\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.212600 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-utilities\") pod \"redhat-operators-67krg\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.212820 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-catalog-content\") pod \"redhat-operators-67krg\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.238044 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pf2x\" (UniqueName: \"kubernetes.io/projected/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-kube-api-access-5pf2x\") pod \"redhat-operators-67krg\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.305828 4935 generic.go:334] "Generic (PLEG): container finished" podID="ad06da62-67e0-4ee7-930b-e91126f164cc" containerID="d6df039c9122721d00640f1f19a7aa44b13dea75101f6797a16f872733918c87" exitCode=0 Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.305917 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ad06da62-67e0-4ee7-930b-e91126f164cc","Type":"ContainerDied","Data":"d6df039c9122721d00640f1f19a7aa44b13dea75101f6797a16f872733918c87"} Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.319379 4935 generic.go:334] "Generic (PLEG): container finished" podID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerID="3fb7254d543be8979761acc8f84edc07d265e98ba396fe94fdd8ddf1eb2b8ab4" exitCode=0 Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.319608 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtvzn" event={"ID":"585dfe75-4262-4b8b-9874-25e51b01cafd","Type":"ContainerDied","Data":"3fb7254d543be8979761acc8f84edc07d265e98ba396fe94fdd8ddf1eb2b8ab4"} Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.319647 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtvzn" event={"ID":"585dfe75-4262-4b8b-9874-25e51b01cafd","Type":"ContainerStarted","Data":"9edfa0003dca4c0480958c4d42c12034035391df0d90e9df8cc6288f2ae6bc05"} Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.324657 4935 generic.go:334] "Generic (PLEG): container finished" podID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerID="a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150" exitCode=0 Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.326157 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxwbt" event={"ID":"b23712c2-6e9e-46eb-a875-8db435d4eabc","Type":"ContainerDied","Data":"a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150"} Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.326194 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxwbt" 
event={"ID":"b23712c2-6e9e-46eb-a875-8db435d4eabc","Type":"ContainerStarted","Data":"e5008744dc7e69e631e1a074671cee0e6da22f6952a487d349a304b080533cce"} Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.404201 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7nwkm"] Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.408061 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.408663 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.416861 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7nwkm"] Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.520454 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-catalog-content\") pod \"redhat-operators-7nwkm\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.520559 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6xtv\" (UniqueName: \"kubernetes.io/projected/89a33869-1bc5-478d-bf66-8d6cde7e4991-kube-api-access-q6xtv\") pod \"redhat-operators-7nwkm\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.520841 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-utilities\") pod \"redhat-operators-7nwkm\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.623340 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-utilities\") pod \"redhat-operators-7nwkm\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.623423 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-catalog-content\") pod \"redhat-operators-7nwkm\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.623477 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6xtv\" (UniqueName: \"kubernetes.io/projected/89a33869-1bc5-478d-bf66-8d6cde7e4991-kube-api-access-q6xtv\") pod \"redhat-operators-7nwkm\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.624940 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-utilities\") pod \"redhat-operators-7nwkm\" (UID: 
\"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.625033 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-catalog-content\") pod \"redhat-operators-7nwkm\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.647029 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6xtv\" (UniqueName: \"kubernetes.io/projected/89a33869-1bc5-478d-bf66-8d6cde7e4991-kube-api-access-q6xtv\") pod \"redhat-operators-7nwkm\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.653714 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.744061 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.826036 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8bac674-5c61-4782-9f74-6374a430e7fc-config-volume\") pod \"d8bac674-5c61-4782-9f74-6374a430e7fc\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.826093 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8bac674-5c61-4782-9f74-6374a430e7fc-secret-volume\") pod \"d8bac674-5c61-4782-9f74-6374a430e7fc\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.826136 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shqdx\" (UniqueName: \"kubernetes.io/projected/d8bac674-5c61-4782-9f74-6374a430e7fc-kube-api-access-shqdx\") pod \"d8bac674-5c61-4782-9f74-6374a430e7fc\" (UID: \"d8bac674-5c61-4782-9f74-6374a430e7fc\") " Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.828131 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8bac674-5c61-4782-9f74-6374a430e7fc-config-volume" (OuterVolumeSpecName: "config-volume") pod "d8bac674-5c61-4782-9f74-6374a430e7fc" (UID: "d8bac674-5c61-4782-9f74-6374a430e7fc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.831520 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8bac674-5c61-4782-9f74-6374a430e7fc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d8bac674-5c61-4782-9f74-6374a430e7fc" (UID: "d8bac674-5c61-4782-9f74-6374a430e7fc"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.831764 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8bac674-5c61-4782-9f74-6374a430e7fc-kube-api-access-shqdx" (OuterVolumeSpecName: "kube-api-access-shqdx") pod "d8bac674-5c61-4782-9f74-6374a430e7fc" (UID: "d8bac674-5c61-4782-9f74-6374a430e7fc"). InnerVolumeSpecName "kube-api-access-shqdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.928464 4935 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8bac674-5c61-4782-9f74-6374a430e7fc-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.928497 4935 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8bac674-5c61-4782-9f74-6374a430e7fc-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.928509 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shqdx\" (UniqueName: \"kubernetes.io/projected/d8bac674-5c61-4782-9f74-6374a430e7fc-kube-api-access-shqdx\") on node \"crc\" DevicePath \"\"" Dec 01 18:32:25 crc kubenswrapper[4935]: I1201 18:32:25.933069 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-67krg"] Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.094883 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7nwkm"] Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.124273 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.129245 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-p95fx" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.213357 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 01 18:32:26 crc kubenswrapper[4935]: E1201 18:32:26.219406 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8bac674-5c61-4782-9f74-6374a430e7fc" containerName="collect-profiles" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.219468 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8bac674-5c61-4782-9f74-6374a430e7fc" containerName="collect-profiles" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.219790 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8bac674-5c61-4782-9f74-6374a430e7fc" containerName="collect-profiles" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.220369 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.220731 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.226208 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.227903 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.335052 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.335118 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.343632 4935 generic.go:334] "Generic (PLEG): container finished" podID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerID="5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24" exitCode=0 Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.343728 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67krg" event={"ID":"91c00d11-75b5-492f-8a4d-74e87a6aa2fe","Type":"ContainerDied","Data":"5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24"} Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.343795 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67krg" event={"ID":"91c00d11-75b5-492f-8a4d-74e87a6aa2fe","Type":"ContainerStarted","Data":"5951c873011cc706c15431968bbc6989a07227b96ef3b9063070a213e3cdb913"} Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.347172 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" event={"ID":"d8bac674-5c61-4782-9f74-6374a430e7fc","Type":"ContainerDied","Data":"f68090cc44c006b769f774fe160f214f715a0b49c30a352983e3d7ada3f234d6"} Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.347212 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f68090cc44c006b769f774fe160f214f715a0b49c30a352983e3d7ada3f234d6" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.347274 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.352223 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nwkm" event={"ID":"89a33869-1bc5-478d-bf66-8d6cde7e4991","Type":"ContainerStarted","Data":"96f80e6da0a614e707b7f2b609c3738879ff06808640e66331e9df0ae1389b7e"} Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.441110 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.441241 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.441381 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.469257 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.554940 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.680461 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.846377 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad06da62-67e0-4ee7-930b-e91126f164cc-kubelet-dir\") pod \"ad06da62-67e0-4ee7-930b-e91126f164cc\" (UID: \"ad06da62-67e0-4ee7-930b-e91126f164cc\") " Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.846606 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad06da62-67e0-4ee7-930b-e91126f164cc-kube-api-access\") pod \"ad06da62-67e0-4ee7-930b-e91126f164cc\" (UID: \"ad06da62-67e0-4ee7-930b-e91126f164cc\") " Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.847443 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad06da62-67e0-4ee7-930b-e91126f164cc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ad06da62-67e0-4ee7-930b-e91126f164cc" (UID: "ad06da62-67e0-4ee7-930b-e91126f164cc"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.854874 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad06da62-67e0-4ee7-930b-e91126f164cc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ad06da62-67e0-4ee7-930b-e91126f164cc" (UID: "ad06da62-67e0-4ee7-930b-e91126f164cc"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.948757 4935 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad06da62-67e0-4ee7-930b-e91126f164cc-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:32:26 crc kubenswrapper[4935]: I1201 18:32:26.948792 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad06da62-67e0-4ee7-930b-e91126f164cc-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 18:32:27 crc kubenswrapper[4935]: I1201 18:32:27.089518 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 01 18:32:27 crc kubenswrapper[4935]: W1201 18:32:27.113600 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod33c0c124_b04a_4219_bbc1_f57e2c0eb65f.slice/crio-c035ecc2ee40703fe19c427f824536df5cabdd1d4c54b1a9024240c6e5f1bbc8 WatchSource:0}: Error finding container c035ecc2ee40703fe19c427f824536df5cabdd1d4c54b1a9024240c6e5f1bbc8: Status 404 returned error can't find the container with id c035ecc2ee40703fe19c427f824536df5cabdd1d4c54b1a9024240c6e5f1bbc8 Dec 01 18:32:27 crc kubenswrapper[4935]: I1201 18:32:27.361717 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ad06da62-67e0-4ee7-930b-e91126f164cc","Type":"ContainerDied","Data":"1bcbbd5e99ff56efe151cf3b1b52663de813cb6d92a7923b2d63871ba795005e"} Dec 01 18:32:27 crc kubenswrapper[4935]: I1201 18:32:27.362068 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1bcbbd5e99ff56efe151cf3b1b52663de813cb6d92a7923b2d63871ba795005e" Dec 01 18:32:27 crc kubenswrapper[4935]: I1201 18:32:27.361736 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 18:32:27 crc kubenswrapper[4935]: I1201 18:32:27.382728 4935 generic.go:334] "Generic (PLEG): container finished" podID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerID="337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874" exitCode=0 Dec 01 18:32:27 crc kubenswrapper[4935]: I1201 18:32:27.383045 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nwkm" event={"ID":"89a33869-1bc5-478d-bf66-8d6cde7e4991","Type":"ContainerDied","Data":"337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874"} Dec 01 18:32:27 crc kubenswrapper[4935]: I1201 18:32:27.390493 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"33c0c124-b04a-4219-bbc1-f57e2c0eb65f","Type":"ContainerStarted","Data":"c035ecc2ee40703fe19c427f824536df5cabdd1d4c54b1a9024240c6e5f1bbc8"} Dec 01 18:32:28 crc kubenswrapper[4935]: I1201 18:32:28.440777 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"33c0c124-b04a-4219-bbc1-f57e2c0eb65f","Type":"ContainerStarted","Data":"f89c3161e69bf9e1d6388fffabd13f8529810b18859f5b07ed07b6f4b2ad8c86"} Dec 01 18:32:29 crc kubenswrapper[4935]: I1201 18:32:29.322867 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-kzzxw" Dec 01 18:32:29 crc kubenswrapper[4935]: I1201 18:32:29.344108 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.344085395 podStartE2EDuration="3.344085395s" podCreationTimestamp="2025-12-01 18:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:28.46781429 +0000 UTC m=+162.489443549" watchObservedRunningTime="2025-12-01 18:32:29.344085395 +0000 UTC m=+163.365714654" Dec 01 18:32:29 crc kubenswrapper[4935]: I1201 18:32:29.482175 4935 generic.go:334] "Generic (PLEG): container finished" podID="33c0c124-b04a-4219-bbc1-f57e2c0eb65f" containerID="f89c3161e69bf9e1d6388fffabd13f8529810b18859f5b07ed07b6f4b2ad8c86" exitCode=0 Dec 01 18:32:29 crc kubenswrapper[4935]: I1201 18:32:29.482243 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"33c0c124-b04a-4219-bbc1-f57e2c0eb65f","Type":"ContainerDied","Data":"f89c3161e69bf9e1d6388fffabd13f8529810b18859f5b07ed07b6f4b2ad8c86"} Dec 01 18:32:30 crc kubenswrapper[4935]: I1201 18:32:30.956903 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:31 crc kubenswrapper[4935]: I1201 18:32:31.064075 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kube-api-access\") pod \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\" (UID: \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\") " Dec 01 18:32:31 crc kubenswrapper[4935]: I1201 18:32:31.064200 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kubelet-dir\") pod \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\" (UID: \"33c0c124-b04a-4219-bbc1-f57e2c0eb65f\") " Dec 01 18:32:31 crc kubenswrapper[4935]: I1201 18:32:31.064602 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "33c0c124-b04a-4219-bbc1-f57e2c0eb65f" (UID: "33c0c124-b04a-4219-bbc1-f57e2c0eb65f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:32:31 crc kubenswrapper[4935]: I1201 18:32:31.076357 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "33c0c124-b04a-4219-bbc1-f57e2c0eb65f" (UID: "33c0c124-b04a-4219-bbc1-f57e2c0eb65f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:32:31 crc kubenswrapper[4935]: I1201 18:32:31.165773 4935 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:32:31 crc kubenswrapper[4935]: I1201 18:32:31.165817 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/33c0c124-b04a-4219-bbc1-f57e2c0eb65f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 18:32:31 crc kubenswrapper[4935]: I1201 18:32:31.517183 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"33c0c124-b04a-4219-bbc1-f57e2c0eb65f","Type":"ContainerDied","Data":"c035ecc2ee40703fe19c427f824536df5cabdd1d4c54b1a9024240c6e5f1bbc8"} Dec 01 18:32:31 crc kubenswrapper[4935]: I1201 18:32:31.517236 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c035ecc2ee40703fe19c427f824536df5cabdd1d4c54b1a9024240c6e5f1bbc8" Dec 01 18:32:31 crc kubenswrapper[4935]: I1201 18:32:31.517261 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 18:32:32 crc kubenswrapper[4935]: I1201 18:32:32.462162 4935 patch_prober.go:28] interesting pod/console-f9d7485db-dbvg7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Dec 01 18:32:32 crc kubenswrapper[4935]: I1201 18:32:32.462698 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dbvg7" podUID="6881ae5d-31b3-4749-bd1a-db65599d48d3" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" Dec 01 18:32:34 crc kubenswrapper[4935]: I1201 18:32:34.109230 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:32:34 crc kubenswrapper[4935]: I1201 18:32:34.115119 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a3c94c79-953e-4cac-b6c4-e98aeef74928-metrics-certs\") pod \"network-metrics-daemon-8jhtj\" (UID: \"a3c94c79-953e-4cac-b6c4-e98aeef74928\") " pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:32:34 crc kubenswrapper[4935]: I1201 18:32:34.352524 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8jhtj" Dec 01 18:32:34 crc kubenswrapper[4935]: I1201 18:32:34.522567 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-pphhc" Dec 01 18:32:42 crc kubenswrapper[4935]: I1201 18:32:42.003432 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:32:42 crc kubenswrapper[4935]: I1201 18:32:42.467850 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:42 crc kubenswrapper[4935]: I1201 18:32:42.474502 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:32:52 crc kubenswrapper[4935]: E1201 18:32:52.483170 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 01 18:32:52 crc kubenswrapper[4935]: E1201 18:32:52.484493 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cd7zv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gsqq5_openshift-marketplace(bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:32:52 crc kubenswrapper[4935]: E1201 18:32:52.486002 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gsqq5" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" Dec 01 18:32:52 crc kubenswrapper[4935]: I1201 18:32:52.733270 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 18:32:52 crc kubenswrapper[4935]: E1201 18:32:52.743680 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 01 18:32:52 crc kubenswrapper[4935]: E1201 18:32:52.743932 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sfcrp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-zpnpj_openshift-marketplace(0efae526-1f2b-44b5-b69e-64af2f426aa8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:32:52 crc kubenswrapper[4935]: E1201 18:32:52.745328 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-zpnpj" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" Dec 01 18:32:53 crc kubenswrapper[4935]: E1201 18:32:53.296226 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gsqq5" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" Dec 01 18:32:53 crc kubenswrapper[4935]: E1201 18:32:53.354342 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 01 18:32:53 crc kubenswrapper[4935]: E1201 18:32:53.357598 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hgssv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-hxwbt_openshift-marketplace(b23712c2-6e9e-46eb-a875-8db435d4eabc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:32:53 crc kubenswrapper[4935]: E1201 18:32:53.358829 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-hxwbt" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" Dec 01 18:32:54 crc kubenswrapper[4935]: I1201 18:32:54.345959 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:32:54 crc kubenswrapper[4935]: I1201 18:32:54.346035 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:32:54 crc kubenswrapper[4935]: I1201 18:32:54.591801 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m95dz" Dec 01 18:32:56 crc kubenswrapper[4935]: E1201 18:32:56.983602 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-zpnpj" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" Dec 01 18:32:56 crc kubenswrapper[4935]: E1201 18:32:56.983652 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling 
image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-hxwbt" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" Dec 01 18:32:57 crc kubenswrapper[4935]: E1201 18:32:57.069701 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 01 18:32:57 crc kubenswrapper[4935]: E1201 18:32:57.069904 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q6xtv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-7nwkm_openshift-marketplace(89a33869-1bc5-478d-bf66-8d6cde7e4991): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:32:57 crc kubenswrapper[4935]: E1201 18:32:57.071107 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-7nwkm" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" Dec 01 18:32:57 crc kubenswrapper[4935]: E1201 18:32:57.084184 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 01 18:32:57 crc kubenswrapper[4935]: E1201 18:32:57.084405 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5pf2x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-67krg_openshift-marketplace(91c00d11-75b5-492f-8a4d-74e87a6aa2fe): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:32:57 crc kubenswrapper[4935]: E1201 18:32:57.086396 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-67krg" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.761632 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-67krg" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.761655 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-7nwkm" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.847997 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.849373 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cxqjg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-rtvzn_openshift-marketplace(585dfe75-4262-4b8b-9874-25e51b01cafd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.851328 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.851541 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h5cdc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
community-operators-mvlbn_openshift-marketplace(558a5c77-7d48-4ebf-af67-d42e717939d5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.851658 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.851798 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2rmnk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gkmsh_openshift-marketplace(66550924-3006-4d90-b516-ac5ea6155bbc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.852882 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-gkmsh" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.852968 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-mvlbn" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" Dec 01 18:32:58 crc kubenswrapper[4935]: E1201 18:32:58.853490 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying 
config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-rtvzn" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" Dec 01 18:32:59 crc kubenswrapper[4935]: I1201 18:32:59.155776 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-8jhtj"] Dec 01 18:32:59 crc kubenswrapper[4935]: I1201 18:32:59.731627 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" event={"ID":"a3c94c79-953e-4cac-b6c4-e98aeef74928","Type":"ContainerStarted","Data":"fd295dbfe841496b773a71acf7170a29fba94d889c4a768347eebd7e4e03c811"} Dec 01 18:32:59 crc kubenswrapper[4935]: I1201 18:32:59.732215 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" event={"ID":"a3c94c79-953e-4cac-b6c4-e98aeef74928","Type":"ContainerStarted","Data":"d61e6961e3d5fe991ce9fc5f5cd3e4cab2d1532fbc058332a3e19df6c8fe15ad"} Dec 01 18:32:59 crc kubenswrapper[4935]: I1201 18:32:59.732235 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8jhtj" event={"ID":"a3c94c79-953e-4cac-b6c4-e98aeef74928","Type":"ContainerStarted","Data":"2f428fde001d4a7e4656f1fa8e2160b93ba10c2d3a37f43dae49f09d09205b19"} Dec 01 18:32:59 crc kubenswrapper[4935]: E1201 18:32:59.732652 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-rtvzn" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" Dec 01 18:32:59 crc kubenswrapper[4935]: E1201 18:32:59.734127 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-mvlbn" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" Dec 01 18:32:59 crc kubenswrapper[4935]: E1201 18:32:59.735133 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-gkmsh" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" Dec 01 18:32:59 crc kubenswrapper[4935]: I1201 18:32:59.768744 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-8jhtj" podStartSLOduration=168.768725933 podStartE2EDuration="2m48.768725933s" podCreationTimestamp="2025-12-01 18:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:32:59.764490262 +0000 UTC m=+193.786119531" watchObservedRunningTime="2025-12-01 18:32:59.768725933 +0000 UTC m=+193.790355192" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.591313 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 18:33:06 crc kubenswrapper[4935]: E1201 18:33:06.592309 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33c0c124-b04a-4219-bbc1-f57e2c0eb65f" containerName="pruner" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.592324 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="33c0c124-b04a-4219-bbc1-f57e2c0eb65f" 
containerName="pruner" Dec 01 18:33:06 crc kubenswrapper[4935]: E1201 18:33:06.592337 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad06da62-67e0-4ee7-930b-e91126f164cc" containerName="pruner" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.592343 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad06da62-67e0-4ee7-930b-e91126f164cc" containerName="pruner" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.592445 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="33c0c124-b04a-4219-bbc1-f57e2c0eb65f" containerName="pruner" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.592455 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad06da62-67e0-4ee7-930b-e91126f164cc" containerName="pruner" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.592909 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.596767 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.596853 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.600630 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.687305 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.687376 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.778955 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsqq5" event={"ID":"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a","Type":"ContainerStarted","Data":"7a4879d2c5434fd5b1daa08da5c2fb89b247a37817265f394c26c90f6895bc14"} Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.789372 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.789496 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.789521 4935 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.814885 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:06 crc kubenswrapper[4935]: I1201 18:33:06.913918 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:07 crc kubenswrapper[4935]: I1201 18:33:07.329573 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 18:33:07 crc kubenswrapper[4935]: I1201 18:33:07.787617 4935 generic.go:334] "Generic (PLEG): container finished" podID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerID="7a4879d2c5434fd5b1daa08da5c2fb89b247a37817265f394c26c90f6895bc14" exitCode=0 Dec 01 18:33:07 crc kubenswrapper[4935]: I1201 18:33:07.787757 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsqq5" event={"ID":"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a","Type":"ContainerDied","Data":"7a4879d2c5434fd5b1daa08da5c2fb89b247a37817265f394c26c90f6895bc14"} Dec 01 18:33:07 crc kubenswrapper[4935]: I1201 18:33:07.791997 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1b45a20b-b5f5-469f-b16d-7fb20b5686a7","Type":"ContainerStarted","Data":"c415f4b33e455d6121b7887e43705840292bbc389ce934f8b5d2941aad72f3f7"} Dec 01 18:33:07 crc kubenswrapper[4935]: I1201 18:33:07.792067 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1b45a20b-b5f5-469f-b16d-7fb20b5686a7","Type":"ContainerStarted","Data":"4f4ea5d5b6e90429e9b33837f89d0a3b33dc7abf130a81d8ac2d91aceeda4bae"} Dec 01 18:33:07 crc kubenswrapper[4935]: I1201 18:33:07.830373 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=1.830348791 podStartE2EDuration="1.830348791s" podCreationTimestamp="2025-12-01 18:33:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:33:07.826559243 +0000 UTC m=+201.848188502" watchObservedRunningTime="2025-12-01 18:33:07.830348791 +0000 UTC m=+201.851978050" Dec 01 18:33:08 crc kubenswrapper[4935]: I1201 18:33:08.799051 4935 generic.go:334] "Generic (PLEG): container finished" podID="1b45a20b-b5f5-469f-b16d-7fb20b5686a7" containerID="c415f4b33e455d6121b7887e43705840292bbc389ce934f8b5d2941aad72f3f7" exitCode=0 Dec 01 18:33:08 crc kubenswrapper[4935]: I1201 18:33:08.799128 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1b45a20b-b5f5-469f-b16d-7fb20b5686a7","Type":"ContainerDied","Data":"c415f4b33e455d6121b7887e43705840292bbc389ce934f8b5d2941aad72f3f7"} Dec 01 18:33:08 crc kubenswrapper[4935]: I1201 18:33:08.803762 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-gsqq5" event={"ID":"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a","Type":"ContainerStarted","Data":"01be97d859d0ed8f258d522e337a6f36b38e01a883402c1b3749c12d82db27d4"} Dec 01 18:33:08 crc kubenswrapper[4935]: I1201 18:33:08.837731 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gsqq5" podStartSLOduration=2.613268373 podStartE2EDuration="46.837708441s" podCreationTimestamp="2025-12-01 18:32:22 +0000 UTC" firstStartedPulling="2025-12-01 18:32:24.288430689 +0000 UTC m=+158.310059948" lastFinishedPulling="2025-12-01 18:33:08.512870757 +0000 UTC m=+202.534500016" observedRunningTime="2025-12-01 18:33:08.83508356 +0000 UTC m=+202.856712819" watchObservedRunningTime="2025-12-01 18:33:08.837708441 +0000 UTC m=+202.859337700" Dec 01 18:33:09 crc kubenswrapper[4935]: I1201 18:33:09.811713 4935 generic.go:334] "Generic (PLEG): container finished" podID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerID="ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3" exitCode=0 Dec 01 18:33:09 crc kubenswrapper[4935]: I1201 18:33:09.811793 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxwbt" event={"ID":"b23712c2-6e9e-46eb-a875-8db435d4eabc","Type":"ContainerDied","Data":"ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3"} Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.082110 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.242846 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kubelet-dir\") pod \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\" (UID: \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\") " Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.242948 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kube-api-access\") pod \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\" (UID: \"1b45a20b-b5f5-469f-b16d-7fb20b5686a7\") " Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.242995 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1b45a20b-b5f5-469f-b16d-7fb20b5686a7" (UID: "1b45a20b-b5f5-469f-b16d-7fb20b5686a7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.243573 4935 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.253419 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1b45a20b-b5f5-469f-b16d-7fb20b5686a7" (UID: "1b45a20b-b5f5-469f-b16d-7fb20b5686a7"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.344811 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1b45a20b-b5f5-469f-b16d-7fb20b5686a7-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.819676 4935 generic.go:334] "Generic (PLEG): container finished" podID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerID="0d11602c8e73c85c5beda213c52e1daacfcc3bdcc9dff6c0420e20925dda6664" exitCode=0 Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.819763 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpnpj" event={"ID":"0efae526-1f2b-44b5-b69e-64af2f426aa8","Type":"ContainerDied","Data":"0d11602c8e73c85c5beda213c52e1daacfcc3bdcc9dff6c0420e20925dda6664"} Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.824745 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxwbt" event={"ID":"b23712c2-6e9e-46eb-a875-8db435d4eabc","Type":"ContainerStarted","Data":"4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf"} Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.827110 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1b45a20b-b5f5-469f-b16d-7fb20b5686a7","Type":"ContainerDied","Data":"4f4ea5d5b6e90429e9b33837f89d0a3b33dc7abf130a81d8ac2d91aceeda4bae"} Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.827177 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f4ea5d5b6e90429e9b33837f89d0a3b33dc7abf130a81d8ac2d91aceeda4bae" Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.827359 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 18:33:10 crc kubenswrapper[4935]: I1201 18:33:10.863218 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hxwbt" podStartSLOduration=1.620598792 podStartE2EDuration="46.863191065s" podCreationTimestamp="2025-12-01 18:32:24 +0000 UTC" firstStartedPulling="2025-12-01 18:32:25.32773194 +0000 UTC m=+159.349361199" lastFinishedPulling="2025-12-01 18:33:10.570324213 +0000 UTC m=+204.591953472" observedRunningTime="2025-12-01 18:33:10.860844112 +0000 UTC m=+204.882473371" watchObservedRunningTime="2025-12-01 18:33:10.863191065 +0000 UTC m=+204.884820334" Dec 01 18:33:11 crc kubenswrapper[4935]: I1201 18:33:11.837736 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nwkm" event={"ID":"89a33869-1bc5-478d-bf66-8d6cde7e4991","Type":"ContainerStarted","Data":"cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6"} Dec 01 18:33:11 crc kubenswrapper[4935]: I1201 18:33:11.856125 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpnpj" event={"ID":"0efae526-1f2b-44b5-b69e-64af2f426aa8","Type":"ContainerStarted","Data":"eb5b1a9759a2ca834161f020c773fb4a6a62bec4451fedbdc489470927250dcc"} Dec 01 18:33:11 crc kubenswrapper[4935]: I1201 18:33:11.904801 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zpnpj" podStartSLOduration=2.748365009 podStartE2EDuration="50.904779516s" podCreationTimestamp="2025-12-01 18:32:21 +0000 UTC" firstStartedPulling="2025-12-01 18:32:23.246062543 +0000 UTC m=+157.267691802" lastFinishedPulling="2025-12-01 18:33:11.40247705 +0000 UTC m=+205.424106309" observedRunningTime="2025-12-01 18:33:11.900914207 +0000 UTC m=+205.922543466" watchObservedRunningTime="2025-12-01 18:33:11.904779516 +0000 UTC m=+205.926408785" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.136566 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.137273 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.190952 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 18:33:12 crc kubenswrapper[4935]: E1201 18:33:12.191307 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b45a20b-b5f5-469f-b16d-7fb20b5686a7" containerName="pruner" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.191323 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b45a20b-b5f5-469f-b16d-7fb20b5686a7" containerName="pruner" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.191460 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b45a20b-b5f5-469f-b16d-7fb20b5686a7" containerName="pruner" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.192009 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.195234 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.195515 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.204993 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.373626 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kubelet-dir\") pod \"installer-9-crc\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.373997 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kube-api-access\") pod \"installer-9-crc\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.374094 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-var-lock\") pod \"installer-9-crc\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.475308 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kubelet-dir\") pod \"installer-9-crc\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.475410 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kube-api-access\") pod \"installer-9-crc\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.475440 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kubelet-dir\") pod \"installer-9-crc\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.475446 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-var-lock\") pod \"installer-9-crc\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.475489 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-var-lock\") pod \"installer-9-crc\" (UID: 
\"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.496448 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kube-api-access\") pod \"installer-9-crc\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.597582 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.617588 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.617711 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.669113 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.866458 4935 generic.go:334] "Generic (PLEG): container finished" podID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerID="cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6" exitCode=0 Dec 01 18:33:12 crc kubenswrapper[4935]: I1201 18:33:12.866574 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nwkm" event={"ID":"89a33869-1bc5-478d-bf66-8d6cde7e4991","Type":"ContainerDied","Data":"cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6"} Dec 01 18:33:13 crc kubenswrapper[4935]: I1201 18:33:13.036190 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 18:33:13 crc kubenswrapper[4935]: W1201 18:33:13.045414 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod68d1fe1e_75ec_4e41_bd04_f90ab04c7a97.slice/crio-b68cc3333448c1a602faf0ffe0db614d8f56e9b0a373d25d4cbd2900631f919d WatchSource:0}: Error finding container b68cc3333448c1a602faf0ffe0db614d8f56e9b0a373d25d4cbd2900631f919d: Status 404 returned error can't find the container with id b68cc3333448c1a602faf0ffe0db614d8f56e9b0a373d25d4cbd2900631f919d Dec 01 18:33:13 crc kubenswrapper[4935]: I1201 18:33:13.267509 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-zpnpj" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerName="registry-server" probeResult="failure" output=< Dec 01 18:33:13 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 18:33:13 crc kubenswrapper[4935]: > Dec 01 18:33:13 crc kubenswrapper[4935]: I1201 18:33:13.875036 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nwkm" event={"ID":"89a33869-1bc5-478d-bf66-8d6cde7e4991","Type":"ContainerStarted","Data":"815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e"} Dec 01 18:33:13 crc kubenswrapper[4935]: I1201 18:33:13.876531 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97","Type":"ContainerStarted","Data":"ba74e1034fe339e27be5a1a4dde7a0c7b0405f0258e14481d31c32f60f6e08ac"} Dec 01 18:33:13 crc 
kubenswrapper[4935]: I1201 18:33:13.876614 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97","Type":"ContainerStarted","Data":"b68cc3333448c1a602faf0ffe0db614d8f56e9b0a373d25d4cbd2900631f919d"} Dec 01 18:33:13 crc kubenswrapper[4935]: I1201 18:33:13.902870 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7nwkm" podStartSLOduration=2.983379627 podStartE2EDuration="48.902834561s" podCreationTimestamp="2025-12-01 18:32:25 +0000 UTC" firstStartedPulling="2025-12-01 18:32:27.386486446 +0000 UTC m=+161.408115705" lastFinishedPulling="2025-12-01 18:33:13.30594139 +0000 UTC m=+207.327570639" observedRunningTime="2025-12-01 18:33:13.895709729 +0000 UTC m=+207.917338998" watchObservedRunningTime="2025-12-01 18:33:13.902834561 +0000 UTC m=+207.924463830" Dec 01 18:33:13 crc kubenswrapper[4935]: I1201 18:33:13.923134 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.923111019 podStartE2EDuration="1.923111019s" podCreationTimestamp="2025-12-01 18:33:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:33:13.922459519 +0000 UTC m=+207.944088778" watchObservedRunningTime="2025-12-01 18:33:13.923111019 +0000 UTC m=+207.944740278" Dec 01 18:33:13 crc kubenswrapper[4935]: I1201 18:33:13.932881 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:33:14 crc kubenswrapper[4935]: I1201 18:33:14.564313 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:33:14 crc kubenswrapper[4935]: I1201 18:33:14.564386 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:33:14 crc kubenswrapper[4935]: I1201 18:33:14.608053 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:33:15 crc kubenswrapper[4935]: I1201 18:33:15.744346 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:33:15 crc kubenswrapper[4935]: I1201 18:33:15.744875 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:33:15 crc kubenswrapper[4935]: I1201 18:33:15.894132 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67krg" event={"ID":"91c00d11-75b5-492f-8a4d-74e87a6aa2fe","Type":"ContainerStarted","Data":"9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7"} Dec 01 18:33:16 crc kubenswrapper[4935]: I1201 18:33:16.786381 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7nwkm" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerName="registry-server" probeResult="failure" output=< Dec 01 18:33:16 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 18:33:16 crc kubenswrapper[4935]: > Dec 01 18:33:16 crc kubenswrapper[4935]: I1201 18:33:16.831138 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gsqq5"] Dec 01 18:33:16 
crc kubenswrapper[4935]: I1201 18:33:16.831445 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gsqq5" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerName="registry-server" containerID="cri-o://01be97d859d0ed8f258d522e337a6f36b38e01a883402c1b3749c12d82db27d4" gracePeriod=2 Dec 01 18:33:16 crc kubenswrapper[4935]: I1201 18:33:16.904021 4935 generic.go:334] "Generic (PLEG): container finished" podID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerID="9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7" exitCode=0 Dec 01 18:33:16 crc kubenswrapper[4935]: I1201 18:33:16.904084 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67krg" event={"ID":"91c00d11-75b5-492f-8a4d-74e87a6aa2fe","Type":"ContainerDied","Data":"9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7"} Dec 01 18:33:18 crc kubenswrapper[4935]: I1201 18:33:18.923265 4935 generic.go:334] "Generic (PLEG): container finished" podID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerID="01be97d859d0ed8f258d522e337a6f36b38e01a883402c1b3749c12d82db27d4" exitCode=0 Dec 01 18:33:18 crc kubenswrapper[4935]: I1201 18:33:18.923346 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsqq5" event={"ID":"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a","Type":"ContainerDied","Data":"01be97d859d0ed8f258d522e337a6f36b38e01a883402c1b3749c12d82db27d4"} Dec 01 18:33:21 crc kubenswrapper[4935]: I1201 18:33:21.832859 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:33:21 crc kubenswrapper[4935]: I1201 18:33:21.950755 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsqq5" event={"ID":"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a","Type":"ContainerDied","Data":"9f7ba5bc3f73af729e83d470204f9609cadac064295309a42ecec5564c7c109c"} Dec 01 18:33:21 crc kubenswrapper[4935]: I1201 18:33:21.950826 4935 scope.go:117] "RemoveContainer" containerID="01be97d859d0ed8f258d522e337a6f36b38e01a883402c1b3749c12d82db27d4" Dec 01 18:33:21 crc kubenswrapper[4935]: I1201 18:33:21.950869 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gsqq5" Dec 01 18:33:21 crc kubenswrapper[4935]: I1201 18:33:21.958284 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtvzn" event={"ID":"585dfe75-4262-4b8b-9874-25e51b01cafd","Type":"ContainerStarted","Data":"9a6d55e8887feb1956d1914b00093b6344b49f99f67ecd0822826635bf913464"} Dec 01 18:33:21 crc kubenswrapper[4935]: I1201 18:33:21.972699 4935 scope.go:117] "RemoveContainer" containerID="7a4879d2c5434fd5b1daa08da5c2fb89b247a37817265f394c26c90f6895bc14" Dec 01 18:33:21 crc kubenswrapper[4935]: I1201 18:33:21.992415 4935 scope.go:117] "RemoveContainer" containerID="099e4680cab07777b7f828dda3b53d8dbbcdd1cfb72c998b24d54ffb79345c93" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.029007 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-catalog-content\") pod \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.029141 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-utilities\") pod \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.029196 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cd7zv\" (UniqueName: \"kubernetes.io/projected/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-kube-api-access-cd7zv\") pod \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\" (UID: \"bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a\") " Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.030431 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-utilities" (OuterVolumeSpecName: "utilities") pod "bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" (UID: "bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.036035 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-kube-api-access-cd7zv" (OuterVolumeSpecName: "kube-api-access-cd7zv") pod "bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" (UID: "bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a"). InnerVolumeSpecName "kube-api-access-cd7zv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.078269 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" (UID: "bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.132078 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.132633 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.132654 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cd7zv\" (UniqueName: \"kubernetes.io/projected/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a-kube-api-access-cd7zv\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.182084 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.231645 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.326969 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gsqq5"] Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.330874 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gsqq5"] Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.534995 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" path="/var/lib/kubelet/pods/bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a/volumes" Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.972265 4935 generic.go:334] "Generic (PLEG): container finished" podID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerID="9a6d55e8887feb1956d1914b00093b6344b49f99f67ecd0822826635bf913464" exitCode=0 Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.972352 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtvzn" event={"ID":"585dfe75-4262-4b8b-9874-25e51b01cafd","Type":"ContainerDied","Data":"9a6d55e8887feb1956d1914b00093b6344b49f99f67ecd0822826635bf913464"} Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.976724 4935 generic.go:334] "Generic (PLEG): container finished" podID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerID="c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad" exitCode=0 Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.976780 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvlbn" event={"ID":"558a5c77-7d48-4ebf-af67-d42e717939d5","Type":"ContainerDied","Data":"c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad"} Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.981067 4935 generic.go:334] "Generic (PLEG): container finished" podID="66550924-3006-4d90-b516-ac5ea6155bbc" containerID="4e5b1a9ae17aef3ee3175d13dd1ed544ca3cd2119d36c393b420e43640409237" exitCode=0 Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.981108 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gkmsh" 
event={"ID":"66550924-3006-4d90-b516-ac5ea6155bbc","Type":"ContainerDied","Data":"4e5b1a9ae17aef3ee3175d13dd1ed544ca3cd2119d36c393b420e43640409237"} Dec 01 18:33:22 crc kubenswrapper[4935]: I1201 18:33:22.991504 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67krg" event={"ID":"91c00d11-75b5-492f-8a4d-74e87a6aa2fe","Type":"ContainerStarted","Data":"c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984"} Dec 01 18:33:23 crc kubenswrapper[4935]: I1201 18:33:23.021359 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-67krg" podStartSLOduration=3.564430335 podStartE2EDuration="59.021332824s" podCreationTimestamp="2025-12-01 18:32:24 +0000 UTC" firstStartedPulling="2025-12-01 18:32:26.345577355 +0000 UTC m=+160.367206614" lastFinishedPulling="2025-12-01 18:33:21.802479804 +0000 UTC m=+215.824109103" observedRunningTime="2025-12-01 18:33:23.018833785 +0000 UTC m=+217.040463044" watchObservedRunningTime="2025-12-01 18:33:23.021332824 +0000 UTC m=+217.042962083" Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.000899 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtvzn" event={"ID":"585dfe75-4262-4b8b-9874-25e51b01cafd","Type":"ContainerStarted","Data":"875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96"} Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.003174 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvlbn" event={"ID":"558a5c77-7d48-4ebf-af67-d42e717939d5","Type":"ContainerStarted","Data":"9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652"} Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.005540 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gkmsh" event={"ID":"66550924-3006-4d90-b516-ac5ea6155bbc","Type":"ContainerStarted","Data":"70a9b3ee46e1c2d71b49774c53fa5584addba00784142d22d9ff8d7c9f3cff8d"} Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.027456 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rtvzn" podStartSLOduration=2.830888003 podStartE2EDuration="1m1.027425624s" podCreationTimestamp="2025-12-01 18:32:23 +0000 UTC" firstStartedPulling="2025-12-01 18:32:25.322738715 +0000 UTC m=+159.344367964" lastFinishedPulling="2025-12-01 18:33:23.519276286 +0000 UTC m=+217.540905585" observedRunningTime="2025-12-01 18:33:24.024604327 +0000 UTC m=+218.046233596" watchObservedRunningTime="2025-12-01 18:33:24.027425624 +0000 UTC m=+218.049054883" Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.059714 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mvlbn" podStartSLOduration=2.671479018 podStartE2EDuration="1m2.059694875s" podCreationTimestamp="2025-12-01 18:32:22 +0000 UTC" firstStartedPulling="2025-12-01 18:32:24.265270821 +0000 UTC m=+158.286900100" lastFinishedPulling="2025-12-01 18:33:23.653486678 +0000 UTC m=+217.675115957" observedRunningTime="2025-12-01 18:33:24.056840577 +0000 UTC m=+218.078469846" watchObservedRunningTime="2025-12-01 18:33:24.059694875 +0000 UTC m=+218.081324134" Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.082170 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gkmsh" 
podStartSLOduration=3.857779395 podStartE2EDuration="1m3.082131971s" podCreationTimestamp="2025-12-01 18:32:21 +0000 UTC" firstStartedPulling="2025-12-01 18:32:24.283997711 +0000 UTC m=+158.305626970" lastFinishedPulling="2025-12-01 18:33:23.508350247 +0000 UTC m=+217.529979546" observedRunningTime="2025-12-01 18:33:24.077325451 +0000 UTC m=+218.098954710" watchObservedRunningTime="2025-12-01 18:33:24.082131971 +0000 UTC m=+218.103761220" Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.207979 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.208056 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.346841 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.346924 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.346981 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.347821 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.347953 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05" gracePeriod=600 Dec 01 18:33:24 crc kubenswrapper[4935]: I1201 18:33:24.634170 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:33:25 crc kubenswrapper[4935]: I1201 18:33:25.013219 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05" exitCode=0 Dec 01 18:33:25 crc kubenswrapper[4935]: I1201 18:33:25.013314 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05"} Dec 01 18:33:25 crc kubenswrapper[4935]: I1201 18:33:25.013773 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"ba70f74e54e1786deaed12104c295d7b917d7f0c9ecd296020b6c7c70c481193"} Dec 01 18:33:25 crc kubenswrapper[4935]: I1201 18:33:25.256799 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-rtvzn" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="registry-server" probeResult="failure" output=< Dec 01 18:33:25 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 18:33:25 crc kubenswrapper[4935]: > Dec 01 18:33:25 crc kubenswrapper[4935]: I1201 18:33:25.409490 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:33:25 crc kubenswrapper[4935]: I1201 18:33:25.409569 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:33:25 crc kubenswrapper[4935]: I1201 18:33:25.785223 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:33:25 crc kubenswrapper[4935]: I1201 18:33:25.847676 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:33:26 crc kubenswrapper[4935]: I1201 18:33:26.429597 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxwbt"] Dec 01 18:33:26 crc kubenswrapper[4935]: I1201 18:33:26.429929 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hxwbt" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerName="registry-server" containerID="cri-o://4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf" gracePeriod=2 Dec 01 18:33:26 crc kubenswrapper[4935]: I1201 18:33:26.460919 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-67krg" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerName="registry-server" probeResult="failure" output=< Dec 01 18:33:26 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 18:33:26 crc kubenswrapper[4935]: > Dec 01 18:33:26 crc kubenswrapper[4935]: I1201 18:33:26.632775 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7nwkm"] Dec 01 18:33:27 crc kubenswrapper[4935]: I1201 18:33:27.050255 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7nwkm" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerName="registry-server" containerID="cri-o://815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e" gracePeriod=2 Dec 01 18:33:28 crc kubenswrapper[4935]: I1201 18:33:28.993618 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.066294 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.071768 4935 generic.go:334] "Generic (PLEG): container finished" podID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerID="4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf" exitCode=0 Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.071849 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxwbt" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.071901 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxwbt" event={"ID":"b23712c2-6e9e-46eb-a875-8db435d4eabc","Type":"ContainerDied","Data":"4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf"} Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.071952 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxwbt" event={"ID":"b23712c2-6e9e-46eb-a875-8db435d4eabc","Type":"ContainerDied","Data":"e5008744dc7e69e631e1a074671cee0e6da22f6952a487d349a304b080533cce"} Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.071986 4935 scope.go:117] "RemoveContainer" containerID="4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.074629 4935 generic.go:334] "Generic (PLEG): container finished" podID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerID="815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e" exitCode=0 Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.074665 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7nwkm" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.074667 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nwkm" event={"ID":"89a33869-1bc5-478d-bf66-8d6cde7e4991","Type":"ContainerDied","Data":"815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e"} Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.074772 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7nwkm" event={"ID":"89a33869-1bc5-478d-bf66-8d6cde7e4991","Type":"ContainerDied","Data":"96f80e6da0a614e707b7f2b609c3738879ff06808640e66331e9df0ae1389b7e"} Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.094237 4935 scope.go:117] "RemoveContainer" containerID="ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.114919 4935 scope.go:117] "RemoveContainer" containerID="a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.130948 4935 scope.go:117] "RemoveContainer" containerID="4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf" Dec 01 18:33:29 crc kubenswrapper[4935]: E1201 18:33:29.131999 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf\": container with ID starting with 4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf not found: ID does not exist" containerID="4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.132050 4935 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf"} err="failed to get container status \"4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf\": rpc error: code = NotFound desc = could not find container \"4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf\": container with ID starting with 4dc8bb499a830a9f79ac1c5f725fe553232e388108265c4e97820de130063ccf not found: ID does not exist" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.132083 4935 scope.go:117] "RemoveContainer" containerID="ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3" Dec 01 18:33:29 crc kubenswrapper[4935]: E1201 18:33:29.132524 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3\": container with ID starting with ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3 not found: ID does not exist" containerID="ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.132573 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3"} err="failed to get container status \"ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3\": rpc error: code = NotFound desc = could not find container \"ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3\": container with ID starting with ca318be41c49375635a1e6a10ea7ff5ed3603c670dd3a86336687bbe045d68e3 not found: ID does not exist" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.132613 4935 scope.go:117] "RemoveContainer" containerID="a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150" Dec 01 18:33:29 crc kubenswrapper[4935]: E1201 18:33:29.133032 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150\": container with ID starting with a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150 not found: ID does not exist" containerID="a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.133083 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150"} err="failed to get container status \"a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150\": rpc error: code = NotFound desc = could not find container \"a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150\": container with ID starting with a57bd30b9398fce26a4ba3f59a0984afb2bbb4c7d98c4877e2d6d7c789768150 not found: ID does not exist" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.133132 4935 scope.go:117] "RemoveContainer" containerID="815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.150710 4935 scope.go:117] "RemoveContainer" containerID="cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.174372 4935 scope.go:117] "RemoveContainer" containerID="337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.175973 
4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-catalog-content\") pod \"b23712c2-6e9e-46eb-a875-8db435d4eabc\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.176063 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6xtv\" (UniqueName: \"kubernetes.io/projected/89a33869-1bc5-478d-bf66-8d6cde7e4991-kube-api-access-q6xtv\") pod \"89a33869-1bc5-478d-bf66-8d6cde7e4991\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.176116 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-utilities\") pod \"89a33869-1bc5-478d-bf66-8d6cde7e4991\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.176179 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-catalog-content\") pod \"89a33869-1bc5-478d-bf66-8d6cde7e4991\" (UID: \"89a33869-1bc5-478d-bf66-8d6cde7e4991\") " Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.176214 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgssv\" (UniqueName: \"kubernetes.io/projected/b23712c2-6e9e-46eb-a875-8db435d4eabc-kube-api-access-hgssv\") pod \"b23712c2-6e9e-46eb-a875-8db435d4eabc\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.176249 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-utilities\") pod \"b23712c2-6e9e-46eb-a875-8db435d4eabc\" (UID: \"b23712c2-6e9e-46eb-a875-8db435d4eabc\") " Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.177345 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-utilities" (OuterVolumeSpecName: "utilities") pod "89a33869-1bc5-478d-bf66-8d6cde7e4991" (UID: "89a33869-1bc5-478d-bf66-8d6cde7e4991"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.177426 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-utilities" (OuterVolumeSpecName: "utilities") pod "b23712c2-6e9e-46eb-a875-8db435d4eabc" (UID: "b23712c2-6e9e-46eb-a875-8db435d4eabc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.185795 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89a33869-1bc5-478d-bf66-8d6cde7e4991-kube-api-access-q6xtv" (OuterVolumeSpecName: "kube-api-access-q6xtv") pod "89a33869-1bc5-478d-bf66-8d6cde7e4991" (UID: "89a33869-1bc5-478d-bf66-8d6cde7e4991"). InnerVolumeSpecName "kube-api-access-q6xtv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.187368 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b23712c2-6e9e-46eb-a875-8db435d4eabc-kube-api-access-hgssv" (OuterVolumeSpecName: "kube-api-access-hgssv") pod "b23712c2-6e9e-46eb-a875-8db435d4eabc" (UID: "b23712c2-6e9e-46eb-a875-8db435d4eabc"). InnerVolumeSpecName "kube-api-access-hgssv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.197055 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b23712c2-6e9e-46eb-a875-8db435d4eabc" (UID: "b23712c2-6e9e-46eb-a875-8db435d4eabc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.198084 4935 scope.go:117] "RemoveContainer" containerID="815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e" Dec 01 18:33:29 crc kubenswrapper[4935]: E1201 18:33:29.198739 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e\": container with ID starting with 815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e not found: ID does not exist" containerID="815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.198815 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e"} err="failed to get container status \"815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e\": rpc error: code = NotFound desc = could not find container \"815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e\": container with ID starting with 815f8ad0e03cadaf6aa668d0b7f6d37de25f6406b536a16f10bd3c1469d1086e not found: ID does not exist" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.198851 4935 scope.go:117] "RemoveContainer" containerID="cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6" Dec 01 18:33:29 crc kubenswrapper[4935]: E1201 18:33:29.199473 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6\": container with ID starting with cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6 not found: ID does not exist" containerID="cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.199523 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6"} err="failed to get container status \"cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6\": rpc error: code = NotFound desc = could not find container \"cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6\": container with ID starting with cbf7c28055f80176d7171480f54b8687153209ec9dad4a0ce10395dcee5890d6 not found: ID does not exist" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.199576 4935 scope.go:117] "RemoveContainer" 
containerID="337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874" Dec 01 18:33:29 crc kubenswrapper[4935]: E1201 18:33:29.200004 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874\": container with ID starting with 337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874 not found: ID does not exist" containerID="337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.200059 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874"} err="failed to get container status \"337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874\": rpc error: code = NotFound desc = could not find container \"337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874\": container with ID starting with 337957ee86fc92f9199aa4685dfa7a94e1ae5ba07730cfe0f63ac6c416723874 not found: ID does not exist" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.277832 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6xtv\" (UniqueName: \"kubernetes.io/projected/89a33869-1bc5-478d-bf66-8d6cde7e4991-kube-api-access-q6xtv\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.277866 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.277876 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgssv\" (UniqueName: \"kubernetes.io/projected/b23712c2-6e9e-46eb-a875-8db435d4eabc-kube-api-access-hgssv\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.277886 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.277895 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23712c2-6e9e-46eb-a875-8db435d4eabc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.289580 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89a33869-1bc5-478d-bf66-8d6cde7e4991" (UID: "89a33869-1bc5-478d-bf66-8d6cde7e4991"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.379889 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a33869-1bc5-478d-bf66-8d6cde7e4991-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.418237 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxwbt"] Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.422452 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxwbt"] Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.425107 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7nwkm"] Dec 01 18:33:29 crc kubenswrapper[4935]: I1201 18:33:29.427559 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7nwkm"] Dec 01 18:33:30 crc kubenswrapper[4935]: I1201 18:33:30.515500 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" path="/var/lib/kubelet/pods/89a33869-1bc5-478d-bf66-8d6cde7e4991/volumes" Dec 01 18:33:30 crc kubenswrapper[4935]: I1201 18:33:30.516126 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" path="/var/lib/kubelet/pods/b23712c2-6e9e-46eb-a875-8db435d4eabc/volumes" Dec 01 18:33:32 crc kubenswrapper[4935]: I1201 18:33:32.406318 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:33:32 crc kubenswrapper[4935]: I1201 18:33:32.408404 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:33:32 crc kubenswrapper[4935]: I1201 18:33:32.481964 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:33:32 crc kubenswrapper[4935]: I1201 18:33:32.738269 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:33:32 crc kubenswrapper[4935]: I1201 18:33:32.738345 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:33:32 crc kubenswrapper[4935]: I1201 18:33:32.808341 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:33:33 crc kubenswrapper[4935]: I1201 18:33:33.123980 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pgjz6"] Dec 01 18:33:33 crc kubenswrapper[4935]: I1201 18:33:33.190956 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:33:33 crc kubenswrapper[4935]: I1201 18:33:33.197014 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:33:33 crc kubenswrapper[4935]: I1201 18:33:33.830555 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mvlbn"] Dec 01 18:33:34 crc kubenswrapper[4935]: I1201 18:33:34.270531 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:33:34 crc kubenswrapper[4935]: I1201 18:33:34.327193 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:33:35 crc kubenswrapper[4935]: I1201 18:33:35.129250 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mvlbn" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerName="registry-server" containerID="cri-o://9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652" gracePeriod=2 Dec 01 18:33:35 crc kubenswrapper[4935]: I1201 18:33:35.463374 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:33:35 crc kubenswrapper[4935]: I1201 18:33:35.529455 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.107316 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.142998 4935 generic.go:334] "Generic (PLEG): container finished" podID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerID="9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652" exitCode=0 Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.144221 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvlbn" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.144817 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvlbn" event={"ID":"558a5c77-7d48-4ebf-af67-d42e717939d5","Type":"ContainerDied","Data":"9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652"} Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.144872 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvlbn" event={"ID":"558a5c77-7d48-4ebf-af67-d42e717939d5","Type":"ContainerDied","Data":"e2c7aad749a73c86604fff2c0bd5ff6e6d45fde62effc10482a4442fea1547d3"} Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.144905 4935 scope.go:117] "RemoveContainer" containerID="9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.177299 4935 scope.go:117] "RemoveContainer" containerID="c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.202741 4935 scope.go:117] "RemoveContainer" containerID="829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.222632 4935 scope.go:117] "RemoveContainer" containerID="9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652" Dec 01 18:33:36 crc kubenswrapper[4935]: E1201 18:33:36.223364 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652\": container with ID starting with 9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652 not found: ID does not exist" containerID="9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.223430 4935 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652"} err="failed to get container status \"9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652\": rpc error: code = NotFound desc = could not find container \"9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652\": container with ID starting with 9677ce911b264996a09ad84abe438c60743f4b799ef3a6b8afa3ac30ba65e652 not found: ID does not exist" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.223476 4935 scope.go:117] "RemoveContainer" containerID="c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad" Dec 01 18:33:36 crc kubenswrapper[4935]: E1201 18:33:36.223956 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad\": container with ID starting with c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad not found: ID does not exist" containerID="c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.224008 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad"} err="failed to get container status \"c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad\": rpc error: code = NotFound desc = could not find container \"c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad\": container with ID starting with c0c37e49dbc23ff40481eeec882d8ebd9b8bfd608c984d9d305fa6fc85ac48ad not found: ID does not exist" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.224041 4935 scope.go:117] "RemoveContainer" containerID="829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f" Dec 01 18:33:36 crc kubenswrapper[4935]: E1201 18:33:36.224433 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f\": container with ID starting with 829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f not found: ID does not exist" containerID="829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.224469 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f"} err="failed to get container status \"829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f\": rpc error: code = NotFound desc = could not find container \"829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f\": container with ID starting with 829101e68e69c319e12e2c9611bb5d4e5a3219836164c767be408621d035815f not found: ID does not exist" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.288643 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5cdc\" (UniqueName: \"kubernetes.io/projected/558a5c77-7d48-4ebf-af67-d42e717939d5-kube-api-access-h5cdc\") pod \"558a5c77-7d48-4ebf-af67-d42e717939d5\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.288877 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-utilities\") pod \"558a5c77-7d48-4ebf-af67-d42e717939d5\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.288922 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-catalog-content\") pod \"558a5c77-7d48-4ebf-af67-d42e717939d5\" (UID: \"558a5c77-7d48-4ebf-af67-d42e717939d5\") " Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.290469 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-utilities" (OuterVolumeSpecName: "utilities") pod "558a5c77-7d48-4ebf-af67-d42e717939d5" (UID: "558a5c77-7d48-4ebf-af67-d42e717939d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.295861 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/558a5c77-7d48-4ebf-af67-d42e717939d5-kube-api-access-h5cdc" (OuterVolumeSpecName: "kube-api-access-h5cdc") pod "558a5c77-7d48-4ebf-af67-d42e717939d5" (UID: "558a5c77-7d48-4ebf-af67-d42e717939d5"). InnerVolumeSpecName "kube-api-access-h5cdc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.353339 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "558a5c77-7d48-4ebf-af67-d42e717939d5" (UID: "558a5c77-7d48-4ebf-af67-d42e717939d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.391826 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5cdc\" (UniqueName: \"kubernetes.io/projected/558a5c77-7d48-4ebf-af67-d42e717939d5-kube-api-access-h5cdc\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.391891 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.391912 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/558a5c77-7d48-4ebf-af67-d42e717939d5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.500065 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mvlbn"] Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.504062 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mvlbn"] Dec 01 18:33:36 crc kubenswrapper[4935]: I1201 18:33:36.516671 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" path="/var/lib/kubelet/pods/558a5c77-7d48-4ebf-af67-d42e717939d5/volumes" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.312914 4935 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314079 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314135 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314170 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314177 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314188 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerName="extract-utilities" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314194 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerName="extract-utilities" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314205 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314211 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314220 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerName="extract-utilities" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314226 4935 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerName="extract-utilities" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314238 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerName="extract-content" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314245 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerName="extract-content" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314259 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerName="extract-content" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314265 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerName="extract-content" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314300 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerName="extract-content" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314306 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerName="extract-content" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314314 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314321 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314330 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerName="extract-utilities" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314336 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerName="extract-utilities" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314345 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerName="extract-utilities" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314351 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerName="extract-utilities" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.314360 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerName="extract-content" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314385 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerName="extract-content" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314487 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="558a5c77-7d48-4ebf-af67-d42e717939d5" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314501 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf5e8422-11c7-41d2-9c3f-4ed3aa3cc10a" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314513 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a33869-1bc5-478d-bf66-8d6cde7e4991" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.314519 
4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="b23712c2-6e9e-46eb-a875-8db435d4eabc" containerName="registry-server" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.315020 4935 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.315239 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.315387 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa" gracePeriod=15 Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.315434 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf" gracePeriod=15 Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.315474 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e" gracePeriod=15 Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.315577 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96" gracePeriod=15 Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.315466 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72" gracePeriod=15 Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316244 4935 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.316374 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316385 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.316394 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316401 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.316410 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-insecure-readyz" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316416 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.316428 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316434 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.316444 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316450 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.316462 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316468 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316560 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316597 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316607 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316614 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.316622 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.332081 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.332300 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.333321 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.333670 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.333955 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.334085 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.334273 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.334492 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.378188 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.436683 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.436747 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.436800 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.436831 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.436869 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.436901 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.436935 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.436963 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.436952 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.437093 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.437093 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.437183 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.437212 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.437217 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.437237 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.437293 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: I1201 18:33:51.662606 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:33:51 crc kubenswrapper[4935]: W1201 18:33:51.680975 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-7fce09f7e13df0b4575d837ae14db4eecf6b769d524aae6c0b9cb8727eea129b WatchSource:0}: Error finding container 7fce09f7e13df0b4575d837ae14db4eecf6b769d524aae6c0b9cb8727eea129b: Status 404 returned error can't find the container with id 7fce09f7e13df0b4575d837ae14db4eecf6b769d524aae6c0b9cb8727eea129b Dec 01 18:33:51 crc kubenswrapper[4935]: E1201 18:33:51.684931 4935 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.65:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d2b1ccc31c64d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 18:33:51.683995213 +0000 UTC m=+245.705624472,LastTimestamp:2025-12-01 18:33:51.683995213 +0000 UTC m=+245.705624472,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 
18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.261443 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0887262ab458fc2d2acb9645bc95ab81e711e65bf1f98cce53621a9008dc6603"} Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.262051 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7fce09f7e13df0b4575d837ae14db4eecf6b769d524aae6c0b9cb8727eea129b"} Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.262428 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.262700 4935 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.265517 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.266541 4935 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf" exitCode=0 Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.266599 4935 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e" exitCode=0 Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.266617 4935 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa" exitCode=0 Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.266638 4935 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72" exitCode=2 Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.269021 4935 generic.go:334] "Generic (PLEG): container finished" podID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" containerID="ba74e1034fe339e27be5a1a4dde7a0c7b0405f0258e14481d31c32f60f6e08ac" exitCode=0 Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.269078 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97","Type":"ContainerDied","Data":"ba74e1034fe339e27be5a1a4dde7a0c7b0405f0258e14481d31c32f60f6e08ac"} Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.269751 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.270463 4935 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.271174 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.514289 4935 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 18:33:52 crc kubenswrapper[4935]: I1201 18:33:52.514705 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.712093 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.713216 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.713728 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.880685 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.881888 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.882718 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.883325 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.884039 4935 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.901640 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kube-api-access\") pod \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.901736 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kubelet-dir\") pod \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.901847 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-var-lock\") pod \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\" (UID: \"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97\") " Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.901995 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" (UID: "68d1fe1e-75ec-4e41-bd04-f90ab04c7a97"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.902192 4935 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.902132 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-var-lock" (OuterVolumeSpecName: "var-lock") pod "68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" (UID: "68d1fe1e-75ec-4e41-bd04-f90ab04c7a97"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:33:53 crc kubenswrapper[4935]: I1201 18:33:53.909043 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" (UID: "68d1fe1e-75ec-4e41-bd04-f90ab04c7a97"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.003407 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.003504 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.003548 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.003608 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.003679 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.003729 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.004015 4935 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-var-lock\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.004042 4935 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.004054 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68d1fe1e-75ec-4e41-bd04-f90ab04c7a97-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.004069 4935 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.004083 4935 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.288881 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.290466 4935 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96" exitCode=0 Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.290601 4935 scope.go:117] "RemoveContainer" containerID="9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.290628 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.292547 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"68d1fe1e-75ec-4e41-bd04-f90ab04c7a97","Type":"ContainerDied","Data":"b68cc3333448c1a602faf0ffe0db614d8f56e9b0a373d25d4cbd2900631f919d"} Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.292587 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b68cc3333448c1a602faf0ffe0db614d8f56e9b0a373d25d4cbd2900631f919d" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.292665 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.310766 4935 scope.go:117] "RemoveContainer" containerID="2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.311694 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.313129 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.313965 4935 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.315334 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.315868 4935 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.316424 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.326690 4935 scope.go:117] "RemoveContainer" containerID="8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.339903 4935 scope.go:117] "RemoveContainer" containerID="6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.353509 4935 scope.go:117] "RemoveContainer" containerID="6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.353861 4935 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.354397 4935 controller.go:195] "Failed to 
update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.354834 4935 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.355277 4935 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.355764 4935 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.355817 4935 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.356228 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="200ms" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.373086 4935 scope.go:117] "RemoveContainer" containerID="ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.403442 4935 scope.go:117] "RemoveContainer" containerID="9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.404524 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\": container with ID starting with 9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf not found: ID does not exist" containerID="9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.404568 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf"} err="failed to get container status \"9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\": rpc error: code = NotFound desc = could not find container \"9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf\": container with ID starting with 9ba93f5d4dff5dba87a73fb62f7d5d295600de80e08fec6b9d845cb1447d2ddf not found: ID does not exist" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.404605 4935 scope.go:117] "RemoveContainer" containerID="2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.405169 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\": container with ID starting with 
2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e not found: ID does not exist" containerID="2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.405193 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e"} err="failed to get container status \"2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\": rpc error: code = NotFound desc = could not find container \"2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e\": container with ID starting with 2ac0d563db61cdb85f707845a3e74ce457f7694aba3f5d9f4b3860fc90928a2e not found: ID does not exist" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.405211 4935 scope.go:117] "RemoveContainer" containerID="8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.405503 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\": container with ID starting with 8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa not found: ID does not exist" containerID="8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.405529 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa"} err="failed to get container status \"8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\": rpc error: code = NotFound desc = could not find container \"8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa\": container with ID starting with 8999bc0d0b25665297940e0d10c498e14359684d349c0acc65e9ce34a21a87aa not found: ID does not exist" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.405551 4935 scope.go:117] "RemoveContainer" containerID="6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.406907 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\": container with ID starting with 6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72 not found: ID does not exist" containerID="6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.406967 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72"} err="failed to get container status \"6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\": rpc error: code = NotFound desc = could not find container \"6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72\": container with ID starting with 6c58e287e439fbcc16f0ca1f89ee1037561af95ff27f04a0fd42c80b0d587b72 not found: ID does not exist" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.406993 4935 scope.go:117] "RemoveContainer" containerID="6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.408019 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\": container with ID starting with 6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96 not found: ID does not exist" containerID="6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.408075 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96"} err="failed to get container status \"6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\": rpc error: code = NotFound desc = could not find container \"6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96\": container with ID starting with 6c5f50b532c0f52297330b212154aed2936018f2f0eca5e2160bf65597da2a96 not found: ID does not exist" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.408108 4935 scope.go:117] "RemoveContainer" containerID="ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.408796 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\": container with ID starting with ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952 not found: ID does not exist" containerID="ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.408857 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952"} err="failed to get container status \"ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\": rpc error: code = NotFound desc = could not find container \"ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952\": container with ID starting with ec9f40f1cf1c4feba7185bb87cb8fc4dbbffac5fb9278574208d0abcbf436952 not found: ID does not exist" Dec 01 18:33:54 crc kubenswrapper[4935]: I1201 18:33:54.515699 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.557437 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="400ms" Dec 01 18:33:54 crc kubenswrapper[4935]: E1201 18:33:54.958094 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="800ms" Dec 01 18:33:55 crc kubenswrapper[4935]: E1201 18:33:55.759404 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="1.6s" Dec 01 18:33:56 crc kubenswrapper[4935]: I1201 18:33:56.512429 4935 status_manager.go:851] "Failed to get status for pod" 
podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:56 crc kubenswrapper[4935]: I1201 18:33:56.512864 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:57 crc kubenswrapper[4935]: E1201 18:33:57.360978 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="3.2s" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.169029 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" containerName="oauth-openshift" containerID="cri-o://f82783ec598686f5a24a773502e1ae1f33fbce0d3179cd5b78724ab43eb16ca3" gracePeriod=15 Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.335919 4935 generic.go:334] "Generic (PLEG): container finished" podID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" containerID="f82783ec598686f5a24a773502e1ae1f33fbce0d3179cd5b78724ab43eb16ca3" exitCode=0 Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.336042 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" event={"ID":"3bf20ee6-67c1-47a6-b47e-5de4d187a495","Type":"ContainerDied","Data":"f82783ec598686f5a24a773502e1ae1f33fbce0d3179cd5b78724ab43eb16ca3"} Dec 01 18:33:58 crc kubenswrapper[4935]: E1201 18:33:58.585571 4935 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.65:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" volumeName="registry-storage" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.685268 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.686298 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.686947 4935 status_manager.go:851] "Failed to get status for pod" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pgjz6\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.687439 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.787440 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-trusted-ca-bundle\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.787562 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-error\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.787621 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llrgx\" (UniqueName: \"kubernetes.io/projected/3bf20ee6-67c1-47a6-b47e-5de4d187a495-kube-api-access-llrgx\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.787691 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-ocp-branding-template\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.787741 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-session\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.787800 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-provider-selection\") pod 
\"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.787891 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-cliconfig\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.789136 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-idp-0-file-data\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.789351 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-login\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.789234 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.789422 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-dir\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.789470 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.789591 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-service-ca\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.789724 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-policies\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.789853 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-router-certs\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.789931 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-serving-cert\") pod \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\" (UID: \"3bf20ee6-67c1-47a6-b47e-5de4d187a495\") " Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.790736 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.790987 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.791017 4935 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.791042 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.792111 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.792137 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.797202 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bf20ee6-67c1-47a6-b47e-5de4d187a495-kube-api-access-llrgx" (OuterVolumeSpecName: "kube-api-access-llrgx") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "kube-api-access-llrgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.798625 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.798932 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.799477 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.801251 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.800659 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.801530 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.801745 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.802468 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "3bf20ee6-67c1-47a6-b47e-5de4d187a495" (UID: "3bf20ee6-67c1-47a6-b47e-5de4d187a495"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894440 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894500 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894515 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894525 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894536 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llrgx\" (UniqueName: \"kubernetes.io/projected/3bf20ee6-67c1-47a6-b47e-5de4d187a495-kube-api-access-llrgx\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894546 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894556 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894566 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894580 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894590 4935 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3bf20ee6-67c1-47a6-b47e-5de4d187a495-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:58 crc kubenswrapper[4935]: I1201 18:33:58.894604 4935 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3bf20ee6-67c1-47a6-b47e-5de4d187a495-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 18:33:59 crc kubenswrapper[4935]: I1201 18:33:59.350285 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" event={"ID":"3bf20ee6-67c1-47a6-b47e-5de4d187a495","Type":"ContainerDied","Data":"ced12cf8269628e08f6885c7456be2df54f720c15be1a6b7fb822e44ca002b37"} Dec 01 18:33:59 crc kubenswrapper[4935]: I1201 18:33:59.350938 4935 scope.go:117] "RemoveContainer" containerID="f82783ec598686f5a24a773502e1ae1f33fbce0d3179cd5b78724ab43eb16ca3" Dec 01 18:33:59 crc kubenswrapper[4935]: I1201 18:33:59.351351 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" Dec 01 18:33:59 crc kubenswrapper[4935]: I1201 18:33:59.353804 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:59 crc kubenswrapper[4935]: I1201 18:33:59.354648 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:59 crc kubenswrapper[4935]: I1201 18:33:59.355416 4935 status_manager.go:851] "Failed to get status for pod" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pgjz6\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:59 crc kubenswrapper[4935]: I1201 18:33:59.380112 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:59 crc kubenswrapper[4935]: I1201 18:33:59.380711 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:33:59 crc kubenswrapper[4935]: I1201 18:33:59.381135 4935 status_manager.go:851] "Failed to get status for pod" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pgjz6\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:00 crc kubenswrapper[4935]: E1201 18:34:00.562894 4935 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.65:6443: connect: connection refused" interval="6.4s" Dec 01 18:34:00 crc kubenswrapper[4935]: E1201 18:34:00.611568 4935 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.65:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d2b1ccc31c64d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container 
image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 18:33:51.683995213 +0000 UTC m=+245.705624472,LastTimestamp:2025-12-01 18:33:51.683995213 +0000 UTC m=+245.705624472,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 18:34:04 crc kubenswrapper[4935]: I1201 18:34:04.508243 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:04 crc kubenswrapper[4935]: I1201 18:34:04.510193 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:04 crc kubenswrapper[4935]: I1201 18:34:04.516532 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:04 crc kubenswrapper[4935]: I1201 18:34:04.517549 4935 status_manager.go:851] "Failed to get status for pod" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pgjz6\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:04 crc kubenswrapper[4935]: I1201 18:34:04.531112 4935 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:04 crc kubenswrapper[4935]: I1201 18:34:04.531186 4935 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:04 crc kubenswrapper[4935]: E1201 18:34:04.531832 4935 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:04 crc kubenswrapper[4935]: I1201 18:34:04.532711 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:04 crc kubenswrapper[4935]: W1201 18:34:04.565576 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-76d9b1eb82ddca236b573116e1e4faedc9d96739aa6b1a16393710f74d306947 WatchSource:0}: Error finding container 76d9b1eb82ddca236b573116e1e4faedc9d96739aa6b1a16393710f74d306947: Status 404 returned error can't find the container with id 76d9b1eb82ddca236b573116e1e4faedc9d96739aa6b1a16393710f74d306947 Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.423929 4935 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="d84e5e870b588bdf73c3c03742aee4244df0ceb77f8695fb30987a2eecb99e49" exitCode=0 Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.424079 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"d84e5e870b588bdf73c3c03742aee4244df0ceb77f8695fb30987a2eecb99e49"} Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.424598 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"76d9b1eb82ddca236b573116e1e4faedc9d96739aa6b1a16393710f74d306947"} Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.425204 4935 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.425239 4935 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.426062 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:05 crc kubenswrapper[4935]: E1201 18:34:05.426089 4935 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.427560 4935 status_manager.go:851] "Failed to get status for pod" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pgjz6\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.427947 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.431927 4935 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.432011 4935 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa" exitCode=1 Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.432069 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa"} Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.432870 4935 scope.go:117] "RemoveContainer" containerID="32fb2a6e53e1c5e8961d4e26697e666b27c4c8e49ad2699f8c573a54971456aa" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.434185 4935 status_manager.go:851] "Failed to get status for pod" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.434594 4935 status_manager.go:851] "Failed to get status for pod" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" pod="openshift-authentication/oauth-openshift-558db77b4-pgjz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-pgjz6\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.434927 4935 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:05 crc kubenswrapper[4935]: I1201 18:34:05.435863 4935 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.65:6443: connect: connection refused" Dec 01 18:34:06 crc kubenswrapper[4935]: I1201 18:34:06.047594 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:34:06 crc kubenswrapper[4935]: I1201 18:34:06.447758 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 01 18:34:06 crc kubenswrapper[4935]: I1201 18:34:06.449106 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0a193c5acb134583ae9496ffddb714e0ec3ffa2192fe34568d7413674b289f80"} Dec 01 18:34:06 crc kubenswrapper[4935]: I1201 18:34:06.452886 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9a9bfe5bfac256336322e1fb7cff79ef69b60525622028a464adcd7a4979d686"} Dec 01 18:34:06 crc kubenswrapper[4935]: I1201 18:34:06.452928 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f3c81a1955c4707678a67833000301bbebb0540fd18b166359cd8aef5efef301"} Dec 01 18:34:06 crc kubenswrapper[4935]: I1201 18:34:06.452947 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b256102b43b5fe2062f7a1965094a9b916451eb87a40c4b1d468949bf87320b2"} Dec 01 18:34:07 crc kubenswrapper[4935]: I1201 18:34:07.462237 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2749181e67daa2f1ef345a6c01ecaf57a275f0af24ef7ab34260bce228094fed"} Dec 01 18:34:07 crc kubenswrapper[4935]: I1201 18:34:07.462298 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"807ce8d0e863ad19084aaa1758277b589bfcd1f8c593fc90f868c9831b5df565"} Dec 01 18:34:07 crc kubenswrapper[4935]: I1201 18:34:07.462686 4935 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:07 crc kubenswrapper[4935]: I1201 18:34:07.462704 4935 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:09 crc kubenswrapper[4935]: I1201 18:34:09.102308 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:34:09 crc kubenswrapper[4935]: I1201 18:34:09.533397 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:09 crc kubenswrapper[4935]: I1201 18:34:09.533472 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:09 crc kubenswrapper[4935]: I1201 18:34:09.540208 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:12 crc kubenswrapper[4935]: I1201 18:34:12.483341 4935 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:12 crc kubenswrapper[4935]: I1201 18:34:12.582606 4935 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="deaf9d2f-4d78-4220-bef9-da1c8ee45418" Dec 01 18:34:13 crc kubenswrapper[4935]: I1201 18:34:13.443526 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:34:13 crc kubenswrapper[4935]: I1201 18:34:13.449905 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:34:13 crc kubenswrapper[4935]: I1201 18:34:13.515528 4935 kubelet.go:1909] 
"Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:13 crc kubenswrapper[4935]: I1201 18:34:13.515586 4935 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:13 crc kubenswrapper[4935]: I1201 18:34:13.519572 4935 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="deaf9d2f-4d78-4220-bef9-da1c8ee45418" Dec 01 18:34:19 crc kubenswrapper[4935]: I1201 18:34:19.108756 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 18:34:22 crc kubenswrapper[4935]: I1201 18:34:22.689292 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 18:34:22 crc kubenswrapper[4935]: I1201 18:34:22.943667 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 18:34:23 crc kubenswrapper[4935]: I1201 18:34:23.436180 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 18:34:23 crc kubenswrapper[4935]: I1201 18:34:23.923445 4935 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.223411 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.267273 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.360889 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.504265 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.504878 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.521732 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.753337 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.877813 4935 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.879578 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 18:34:24 crc kubenswrapper[4935]: I1201 18:34:24.920016 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.056295 4935 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.166335 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.268793 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.270117 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.421819 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.541757 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.748648 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.809419 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.932274 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 18:34:25 crc kubenswrapper[4935]: I1201 18:34:25.939630 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.099579 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.103900 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.130700 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.133303 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.181551 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.233426 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.283981 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.334878 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.424479 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.447680 4935 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.455059 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.583946 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.626471 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.636256 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.667641 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.713867 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.845361 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.970102 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 18:34:26 crc kubenswrapper[4935]: I1201 18:34:26.973277 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.080617 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.124486 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.240683 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.250485 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.276417 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.478587 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.503998 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.570103 4935 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.580851 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.626491 4935 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.821698 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.932860 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 18:34:27 crc kubenswrapper[4935]: I1201 18:34:27.970523 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.063535 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.077367 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.078251 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.110430 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.130895 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.253416 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.263553 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.304037 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.446241 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.585571 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.658293 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.767724 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.769746 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.805357 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.909426 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.946031 4935 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.977502 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.982223 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 18:34:28 crc kubenswrapper[4935]: I1201 18:34:28.990591 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.008569 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.106953 4935 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.160336 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.182641 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.255707 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.306608 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.362297 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.462244 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.514915 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.545035 4935 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.687060 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.709456 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.719583 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.721713 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.764040 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.767790 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 18:34:29 crc 
kubenswrapper[4935]: I1201 18:34:29.814422 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.831765 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.869627 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.878929 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.889430 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.957766 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.982961 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 18:34:29 crc kubenswrapper[4935]: I1201 18:34:29.989928 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.096910 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.249469 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.409719 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.439969 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.450840 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.457511 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.473556 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.549902 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.625867 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.652508 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.653850 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.719352 4935 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.722637 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.745906 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.746192 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.747810 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.765831 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 18:34:30 crc kubenswrapper[4935]: I1201 18:34:30.875204 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.171851 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.197275 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.417912 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.438537 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.545630 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.565091 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.582049 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.589065 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.652196 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.683572 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.686536 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.763184 4935 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.808125 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 18:34:31 crc kubenswrapper[4935]: I1201 18:34:31.924916 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.024048 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.156003 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.177620 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.335999 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.500100 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.533391 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.574022 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.581697 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.624004 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.648696 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.741021 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 01 18:34:32 crc kubenswrapper[4935]: I1201 18:34:32.984043 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.009741 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.013543 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.016810 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.112142 4935 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.112371 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=42.112301871 podStartE2EDuration="42.112301871s" podCreationTimestamp="2025-12-01 18:33:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:34:12.553838784 +0000 UTC m=+266.575468043" watchObservedRunningTime="2025-12-01 18:34:33.112301871 +0000 UTC m=+287.133931130" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.118019 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pgjz6","openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.118099 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-7dc5844c99-6vkg5"] Dec 01 18:34:33 crc kubenswrapper[4935]: E1201 18:34:33.118433 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" containerName="oauth-openshift" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.118461 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" containerName="oauth-openshift" Dec 01 18:34:33 crc kubenswrapper[4935]: E1201 18:34:33.118473 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" containerName="installer" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.118481 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" containerName="installer" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.118598 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" containerName="oauth-openshift" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.118629 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="68d1fe1e-75ec-4e41-bd04-f90ab04c7a97" containerName="installer" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.118865 4935 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.118928 4935 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="985b40e4-7d32-4219-aa6a-a13bc94263fe" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.119009 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.119096 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.124378 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.124390 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.124406 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.124402 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.124580 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.127132 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.127468 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.127540 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.127933 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.128399 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.128784 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.128843 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.131424 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.136174 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.138202 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.143177 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.151492 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=21.151469587 podStartE2EDuration="21.151469587s" podCreationTimestamp="2025-12-01 18:34:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-01 18:34:33.149619259 +0000 UTC m=+287.171248528" watchObservedRunningTime="2025-12-01 18:34:33.151469587 +0000 UTC m=+287.173098846" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.163528 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-audit-policies\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.163622 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.163663 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.163701 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-router-certs\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.163852 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-audit-dir\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.163938 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.163992 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-template-login\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.164077 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.164166 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj9rp\" (UniqueName: \"kubernetes.io/projected/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-kube-api-access-mj9rp\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.164207 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-template-error\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.164256 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-service-ca\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.164337 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-session\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.164407 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.164455 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.177030 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.265900 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7dc5844c99-6vkg5\" 
(UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.265970 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-audit-policies\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266021 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266042 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266068 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-router-certs\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266094 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-audit-dir\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266116 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266163 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-template-login\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266194 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: 
\"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266219 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj9rp\" (UniqueName: \"kubernetes.io/projected/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-kube-api-access-mj9rp\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266240 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-template-error\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266257 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-service-ca\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266280 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-session\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.266313 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.267168 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.267107 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-audit-dir\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.267837 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-service-ca\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" 
Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.268320 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.268959 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-audit-policies\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.274203 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.274690 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.274991 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-router-certs\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.276781 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-template-login\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.277178 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.277470 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-user-template-error\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.280237 
4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-session\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.293918 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj9rp\" (UniqueName: \"kubernetes.io/projected/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-kube-api-access-mj9rp\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.294292 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f43beba8-f51e-4f8d-8c07-9cd25a6c4803-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7dc5844c99-6vkg5\" (UID: \"f43beba8-f51e-4f8d-8c07-9cd25a6c4803\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.299499 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.326737 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.352911 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.446029 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.458286 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.674975 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.676851 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.699636 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.720670 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.755054 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.780698 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.824408 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.828237 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.872529 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.884662 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.948867 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 01 18:34:33 crc kubenswrapper[4935]: I1201 18:34:33.949996 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.004833 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7dc5844c99-6vkg5"] Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.051921 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.088706 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.146624 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.146840 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 
18:34:34.190440 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.192605 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.210919 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.223894 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.250704 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.258323 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.259705 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.294943 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7dc5844c99-6vkg5"] Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.296641 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.342197 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.391798 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.399008 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.410070 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.428770 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.471771 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.495290 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.520874 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bf20ee6-67c1-47a6-b47e-5de4d187a495" path="/var/lib/kubelet/pods/3bf20ee6-67c1-47a6-b47e-5de4d187a495/volumes" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.529163 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.659665 4935 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.678165 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" event={"ID":"f43beba8-f51e-4f8d-8c07-9cd25a6c4803","Type":"ContainerStarted","Data":"76f55390961e0c30f29d9592f9319b1addbff09273a645417a869e454e779bc9"} Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.678234 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" event={"ID":"f43beba8-f51e-4f8d-8c07-9cd25a6c4803","Type":"ContainerStarted","Data":"398dc0d78fe231227460e7c3911a737f0c1d8d6dc9eae08da96b04120ccfbf81"} Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.679081 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.692295 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.695750 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.701269 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" podStartSLOduration=61.701243007 podStartE2EDuration="1m1.701243007s" podCreationTimestamp="2025-12-01 18:33:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:34:34.697578592 +0000 UTC m=+288.719207851" watchObservedRunningTime="2025-12-01 18:34:34.701243007 +0000 UTC m=+288.722872266" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.767531 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.841979 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.971919 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 01 18:34:34 crc kubenswrapper[4935]: I1201 18:34:34.989167 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.001370 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.047891 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.051131 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.071734 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.074078 4935 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.100927 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.185909 4935 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.186245 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://0887262ab458fc2d2acb9645bc95ab81e711e65bf1f98cce53621a9008dc6603" gracePeriod=5 Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.211738 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.305862 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.324586 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7dc5844c99-6vkg5" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.433965 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.480164 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.756884 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.839506 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.903298 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 01 18:34:35 crc kubenswrapper[4935]: I1201 18:34:35.960640 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.043250 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.109600 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.142535 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.173585 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.260993 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 
18:34:36.286953 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.402132 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.430095 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.462919 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.501256 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.647001 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.683340 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.774531 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.815241 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.835056 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 18:34:36 crc kubenswrapper[4935]: I1201 18:34:36.835878 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.022758 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.147015 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.211408 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.442384 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.505634 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.675603 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.783442 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.823830 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.949479 4935 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 18:34:37 crc kubenswrapper[4935]: I1201 18:34:37.992231 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 18:34:38 crc kubenswrapper[4935]: I1201 18:34:38.011479 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 18:34:38 crc kubenswrapper[4935]: I1201 18:34:38.315252 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 01 18:34:38 crc kubenswrapper[4935]: I1201 18:34:38.508325 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 18:34:38 crc kubenswrapper[4935]: I1201 18:34:38.828140 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 01 18:34:39 crc kubenswrapper[4935]: I1201 18:34:39.255835 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 01 18:34:39 crc kubenswrapper[4935]: I1201 18:34:39.862953 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 01 18:34:39 crc kubenswrapper[4935]: I1201 18:34:39.976193 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.724387 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.724514 4935 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="0887262ab458fc2d2acb9645bc95ab81e711e65bf1f98cce53621a9008dc6603" exitCode=137 Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.799799 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.799962 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.995686 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.995876 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.995979 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.995982 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.996081 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.996070 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.996127 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.996119 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.996259 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.996561 4935 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.996594 4935 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.996621 4935 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 01 18:34:40 crc kubenswrapper[4935]: I1201 18:34:40.996646 4935 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:34:41 crc kubenswrapper[4935]: I1201 18:34:41.009876 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:34:41 crc kubenswrapper[4935]: I1201 18:34:41.098070 4935 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 18:34:41 crc kubenswrapper[4935]: I1201 18:34:41.735865 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 01 18:34:41 crc kubenswrapper[4935]: I1201 18:34:41.736382 4935 scope.go:117] "RemoveContainer" containerID="0887262ab458fc2d2acb9645bc95ab81e711e65bf1f98cce53621a9008dc6603" Dec 01 18:34:41 crc kubenswrapper[4935]: I1201 18:34:41.736551 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 18:34:42 crc kubenswrapper[4935]: I1201 18:34:42.518779 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 01 18:34:42 crc kubenswrapper[4935]: I1201 18:34:42.519221 4935 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 01 18:34:42 crc kubenswrapper[4935]: I1201 18:34:42.535078 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 18:34:42 crc kubenswrapper[4935]: I1201 18:34:42.535172 4935 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="36e29a52-b8cc-4996-92b2-583ac480be88" Dec 01 18:34:42 crc kubenswrapper[4935]: I1201 18:34:42.539781 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 18:34:42 crc kubenswrapper[4935]: I1201 18:34:42.539843 4935 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="36e29a52-b8cc-4996-92b2-583ac480be88" Dec 01 18:35:04 crc kubenswrapper[4935]: I1201 18:35:04.961774 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t4jgn"] Dec 01 18:35:04 crc kubenswrapper[4935]: I1201 18:35:04.962945 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" podUID="ebcaf8e6-60d5-43ee-993d-9ed20564e23f" containerName="controller-manager" containerID="cri-o://4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255" gracePeriod=30 Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.059356 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv"] Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.059716 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" podUID="d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" containerName="route-controller-manager" containerID="cri-o://ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1" gracePeriod=30 Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.376606 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.440612 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.475972 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-proxy-ca-bundles\") pod \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.476065 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-config\") pod \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.476120 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zhs4\" (UniqueName: \"kubernetes.io/projected/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-kube-api-access-2zhs4\") pod \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.476189 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-serving-cert\") pod \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.476245 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-client-ca\") pod \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\" (UID: \"ebcaf8e6-60d5-43ee-993d-9ed20564e23f\") " Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.477424 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-client-ca" (OuterVolumeSpecName: "client-ca") pod "ebcaf8e6-60d5-43ee-993d-9ed20564e23f" (UID: "ebcaf8e6-60d5-43ee-993d-9ed20564e23f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.477504 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-config" (OuterVolumeSpecName: "config") pod "ebcaf8e6-60d5-43ee-993d-9ed20564e23f" (UID: "ebcaf8e6-60d5-43ee-993d-9ed20564e23f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.478066 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "ebcaf8e6-60d5-43ee-993d-9ed20564e23f" (UID: "ebcaf8e6-60d5-43ee-993d-9ed20564e23f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.486031 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-kube-api-access-2zhs4" (OuterVolumeSpecName: "kube-api-access-2zhs4") pod "ebcaf8e6-60d5-43ee-993d-9ed20564e23f" (UID: "ebcaf8e6-60d5-43ee-993d-9ed20564e23f"). 
InnerVolumeSpecName "kube-api-access-2zhs4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.486649 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ebcaf8e6-60d5-43ee-993d-9ed20564e23f" (UID: "ebcaf8e6-60d5-43ee-993d-9ed20564e23f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.577388 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhhbv\" (UniqueName: \"kubernetes.io/projected/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-kube-api-access-vhhbv\") pod \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.577466 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-serving-cert\") pod \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.577619 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-client-ca\") pod \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.577726 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-config\") pod \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\" (UID: \"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f\") " Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.579280 4935 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.579352 4935 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.579380 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.579368 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-client-ca" (OuterVolumeSpecName: "client-ca") pod "d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" (UID: "d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.579415 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-config" (OuterVolumeSpecName: "config") pod "d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" (UID: "d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.579402 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zhs4\" (UniqueName: \"kubernetes.io/projected/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-kube-api-access-2zhs4\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.579481 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebcaf8e6-60d5-43ee-993d-9ed20564e23f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.582650 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" (UID: "d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.584097 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-kube-api-access-vhhbv" (OuterVolumeSpecName: "kube-api-access-vhhbv") pod "d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" (UID: "d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f"). InnerVolumeSpecName "kube-api-access-vhhbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.681602 4935 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.681657 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.681677 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhhbv\" (UniqueName: \"kubernetes.io/projected/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-kube-api-access-vhhbv\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.681700 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.903597 4935 generic.go:334] "Generic (PLEG): container finished" podID="d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" containerID="ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1" exitCode=0 Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.903852 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.903819 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" event={"ID":"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f","Type":"ContainerDied","Data":"ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1"} Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.905287 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv" event={"ID":"d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f","Type":"ContainerDied","Data":"a031f88e4ac011a643bcc027f3955b0d47df904ffceb548eb0c88e0e4df0c1ad"} Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.905586 4935 scope.go:117] "RemoveContainer" containerID="ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.909286 4935 generic.go:334] "Generic (PLEG): container finished" podID="ebcaf8e6-60d5-43ee-993d-9ed20564e23f" containerID="4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255" exitCode=0 Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.909349 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" event={"ID":"ebcaf8e6-60d5-43ee-993d-9ed20564e23f","Type":"ContainerDied","Data":"4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255"} Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.909406 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" event={"ID":"ebcaf8e6-60d5-43ee-993d-9ed20564e23f","Type":"ContainerDied","Data":"2cb76fb5db3a2653d6f0aa3ae751405eee398cb54f9bfcff21444fb7f2d94165"} Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.909554 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t4jgn" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.940393 4935 scope.go:117] "RemoveContainer" containerID="ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1" Dec 01 18:35:05 crc kubenswrapper[4935]: E1201 18:35:05.941140 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1\": container with ID starting with ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1 not found: ID does not exist" containerID="ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.941190 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1"} err="failed to get container status \"ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1\": rpc error: code = NotFound desc = could not find container \"ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1\": container with ID starting with ec9ec63deddf9a6ab5ee45ac7f33e221e6c47955a0181a42eb46405b64962dc1 not found: ID does not exist" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.941232 4935 scope.go:117] "RemoveContainer" containerID="4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.942018 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv"] Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.946556 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4dmcv"] Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.959597 4935 scope.go:117] "RemoveContainer" containerID="4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255" Dec 01 18:35:05 crc kubenswrapper[4935]: E1201 18:35:05.960669 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255\": container with ID starting with 4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255 not found: ID does not exist" containerID="4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.960702 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255"} err="failed to get container status \"4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255\": rpc error: code = NotFound desc = could not find container \"4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255\": container with ID starting with 4d2dfe5822c81eed5fc3ce9393f2d38c204e1e4f9b31dfc36752821eec934255 not found: ID does not exist" Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.966126 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t4jgn"] Dec 01 18:35:05 crc kubenswrapper[4935]: I1201 18:35:05.972476 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t4jgn"] Dec 01 18:35:06 crc 
kubenswrapper[4935]: I1201 18:35:06.520665 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" path="/var/lib/kubelet/pods/d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f/volumes" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.521522 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebcaf8e6-60d5-43ee-993d-9ed20564e23f" path="/var/lib/kubelet/pods/ebcaf8e6-60d5-43ee-993d-9ed20564e23f/volumes" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.654765 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7"] Dec 01 18:35:06 crc kubenswrapper[4935]: E1201 18:35:06.655140 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebcaf8e6-60d5-43ee-993d-9ed20564e23f" containerName="controller-manager" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.655180 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebcaf8e6-60d5-43ee-993d-9ed20564e23f" containerName="controller-manager" Dec 01 18:35:06 crc kubenswrapper[4935]: E1201 18:35:06.655209 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" containerName="route-controller-manager" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.655218 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" containerName="route-controller-manager" Dec 01 18:35:06 crc kubenswrapper[4935]: E1201 18:35:06.655237 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.655246 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.655372 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.655395 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebcaf8e6-60d5-43ee-993d-9ed20564e23f" containerName="controller-manager" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.655413 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d15c8b60-4339-4f4f-aaa4-d2111f9a6c9f" containerName="route-controller-manager" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.655980 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.658769 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq"] Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.658784 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.659126 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.659396 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.659673 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.659727 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.659798 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.659834 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.662476 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.662765 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.662947 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.663084 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.663238 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.667207 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.675304 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.681714 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq"] Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.687964 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7"] Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.803337 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-config\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.803394 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-client-ca\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.803427 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-proxy-ca-bundles\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.803445 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-client-ca\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.803520 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-config\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.803624 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqgnp\" (UniqueName: \"kubernetes.io/projected/32c452d6-4b6b-48a6-918d-0dcf947f2a22-kube-api-access-jqgnp\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.803729 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32c452d6-4b6b-48a6-918d-0dcf947f2a22-serving-cert\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.803769 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67a3e4a8-d918-48d6-adc0-edf997699f14-serving-cert\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.803847 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tjgv\" (UniqueName: 
\"kubernetes.io/projected/67a3e4a8-d918-48d6-adc0-edf997699f14-kube-api-access-5tjgv\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.905034 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67a3e4a8-d918-48d6-adc0-edf997699f14-serving-cert\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.905102 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tjgv\" (UniqueName: \"kubernetes.io/projected/67a3e4a8-d918-48d6-adc0-edf997699f14-kube-api-access-5tjgv\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.905174 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-config\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.905196 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-client-ca\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.905218 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-proxy-ca-bundles\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.905237 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-client-ca\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.905262 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-config\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.905282 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqgnp\" (UniqueName: \"kubernetes.io/projected/32c452d6-4b6b-48a6-918d-0dcf947f2a22-kube-api-access-jqgnp\") pod 
\"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.905317 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32c452d6-4b6b-48a6-918d-0dcf947f2a22-serving-cert\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.906428 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-client-ca\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.906627 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-proxy-ca-bundles\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.906879 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-config\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.907000 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-config\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.907382 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-client-ca\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.910831 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67a3e4a8-d918-48d6-adc0-edf997699f14-serving-cert\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.915234 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32c452d6-4b6b-48a6-918d-0dcf947f2a22-serving-cert\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.932706 4935 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tjgv\" (UniqueName: \"kubernetes.io/projected/67a3e4a8-d918-48d6-adc0-edf997699f14-kube-api-access-5tjgv\") pod \"route-controller-manager-559846b6c5-7mjx7\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.937017 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqgnp\" (UniqueName: \"kubernetes.io/projected/32c452d6-4b6b-48a6-918d-0dcf947f2a22-kube-api-access-jqgnp\") pod \"controller-manager-7dbb8cdfc9-2htkq\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.974123 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:06 crc kubenswrapper[4935]: I1201 18:35:06.990410 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.200813 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq"] Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.247947 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7"] Dec 01 18:35:07 crc kubenswrapper[4935]: W1201 18:35:07.256185 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67a3e4a8_d918_48d6_adc0_edf997699f14.slice/crio-dfa90365d8c7d43f61a9f00f473736183c620162810e23b3bf05e0cdc0e61b79 WatchSource:0}: Error finding container dfa90365d8c7d43f61a9f00f473736183c620162810e23b3bf05e0cdc0e61b79: Status 404 returned error can't find the container with id dfa90365d8c7d43f61a9f00f473736183c620162810e23b3bf05e0cdc0e61b79 Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.932185 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" event={"ID":"32c452d6-4b6b-48a6-918d-0dcf947f2a22","Type":"ContainerStarted","Data":"46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353"} Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.932839 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" event={"ID":"32c452d6-4b6b-48a6-918d-0dcf947f2a22","Type":"ContainerStarted","Data":"56f16eb24be742889ab3002fa59d80871adadca1c191bd93cfc30843b18704c7"} Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.932871 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.933735 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" event={"ID":"67a3e4a8-d918-48d6-adc0-edf997699f14","Type":"ContainerStarted","Data":"a9212783fedbba4d5fce7ab52adaa4fd072fb6f3c11ed92ad3ed535a012ae97b"} Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.933770 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" event={"ID":"67a3e4a8-d918-48d6-adc0-edf997699f14","Type":"ContainerStarted","Data":"dfa90365d8c7d43f61a9f00f473736183c620162810e23b3bf05e0cdc0e61b79"} Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.934031 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.941776 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.972646 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" podStartSLOduration=2.972610318 podStartE2EDuration="2.972610318s" podCreationTimestamp="2025-12-01 18:35:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:35:07.956252016 +0000 UTC m=+321.977881295" watchObservedRunningTime="2025-12-01 18:35:07.972610318 +0000 UTC m=+321.994239587" Dec 01 18:35:07 crc kubenswrapper[4935]: I1201 18:35:07.995133 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" podStartSLOduration=2.995099311 podStartE2EDuration="2.995099311s" podCreationTimestamp="2025-12-01 18:35:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:35:07.991511769 +0000 UTC m=+322.013141048" watchObservedRunningTime="2025-12-01 18:35:07.995099311 +0000 UTC m=+322.016728560" Dec 01 18:35:08 crc kubenswrapper[4935]: I1201 18:35:08.183544 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.011227 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-64qpm"] Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.012133 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.027745 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-64qpm"] Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.154202 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8815aeb1-da10-4299-b738-1456ea8a86fe-ca-trust-extracted\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.154264 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8815aeb1-da10-4299-b738-1456ea8a86fe-trusted-ca\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.154290 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8815aeb1-da10-4299-b738-1456ea8a86fe-installation-pull-secrets\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.154320 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8815aeb1-da10-4299-b738-1456ea8a86fe-registry-certificates\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.154403 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8815aeb1-da10-4299-b738-1456ea8a86fe-bound-sa-token\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.154432 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9jjn\" (UniqueName: \"kubernetes.io/projected/8815aeb1-da10-4299-b738-1456ea8a86fe-kube-api-access-f9jjn\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.154575 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.154661 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8815aeb1-da10-4299-b738-1456ea8a86fe-registry-tls\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.195953 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.256778 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8815aeb1-da10-4299-b738-1456ea8a86fe-ca-trust-extracted\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.257123 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8815aeb1-da10-4299-b738-1456ea8a86fe-trusted-ca\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.257230 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8815aeb1-da10-4299-b738-1456ea8a86fe-installation-pull-secrets\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.257330 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8815aeb1-da10-4299-b738-1456ea8a86fe-registry-certificates\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.257411 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8815aeb1-da10-4299-b738-1456ea8a86fe-ca-trust-extracted\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.257499 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8815aeb1-da10-4299-b738-1456ea8a86fe-bound-sa-token\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.257579 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9jjn\" (UniqueName: \"kubernetes.io/projected/8815aeb1-da10-4299-b738-1456ea8a86fe-kube-api-access-f9jjn\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.257674 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8815aeb1-da10-4299-b738-1456ea8a86fe-registry-tls\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.258707 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8815aeb1-da10-4299-b738-1456ea8a86fe-registry-certificates\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.258908 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8815aeb1-da10-4299-b738-1456ea8a86fe-trusted-ca\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.264820 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8815aeb1-da10-4299-b738-1456ea8a86fe-installation-pull-secrets\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.267846 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8815aeb1-da10-4299-b738-1456ea8a86fe-registry-tls\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.278520 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9jjn\" (UniqueName: \"kubernetes.io/projected/8815aeb1-da10-4299-b738-1456ea8a86fe-kube-api-access-f9jjn\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.280387 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8815aeb1-da10-4299-b738-1456ea8a86fe-bound-sa-token\") pod \"image-registry-66df7c8f76-64qpm\" (UID: \"8815aeb1-da10-4299-b738-1456ea8a86fe\") " pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.327061 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.848985 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-64qpm"] Dec 01 18:35:10 crc kubenswrapper[4935]: W1201 18:35:10.854286 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8815aeb1_da10_4299_b738_1456ea8a86fe.slice/crio-78130ab3c2e7a541e1b94409b42d6cf598e55ed8ccec712ac1e2137aafecd0c5 WatchSource:0}: Error finding container 78130ab3c2e7a541e1b94409b42d6cf598e55ed8ccec712ac1e2137aafecd0c5: Status 404 returned error can't find the container with id 78130ab3c2e7a541e1b94409b42d6cf598e55ed8ccec712ac1e2137aafecd0c5 Dec 01 18:35:10 crc kubenswrapper[4935]: I1201 18:35:10.951427 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" event={"ID":"8815aeb1-da10-4299-b738-1456ea8a86fe","Type":"ContainerStarted","Data":"78130ab3c2e7a541e1b94409b42d6cf598e55ed8ccec712ac1e2137aafecd0c5"} Dec 01 18:35:11 crc kubenswrapper[4935]: I1201 18:35:11.958836 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" event={"ID":"8815aeb1-da10-4299-b738-1456ea8a86fe","Type":"ContainerStarted","Data":"800742c5f3266ecd8d1b112e7376405712cb8abc50618902ab95de7540c81507"} Dec 01 18:35:11 crc kubenswrapper[4935]: I1201 18:35:11.959677 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:11 crc kubenswrapper[4935]: I1201 18:35:11.993467 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" podStartSLOduration=2.993448958 podStartE2EDuration="2.993448958s" podCreationTimestamp="2025-12-01 18:35:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:35:11.988458902 +0000 UTC m=+326.010088171" watchObservedRunningTime="2025-12-01 18:35:11.993448958 +0000 UTC m=+326.015078217" Dec 01 18:35:24 crc kubenswrapper[4935]: I1201 18:35:24.346408 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:35:24 crc kubenswrapper[4935]: I1201 18:35:24.347254 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:35:24 crc kubenswrapper[4935]: I1201 18:35:24.954424 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq"] Dec 01 18:35:24 crc kubenswrapper[4935]: I1201 18:35:24.954719 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" podUID="32c452d6-4b6b-48a6-918d-0dcf947f2a22" containerName="controller-manager" 
containerID="cri-o://46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353" gracePeriod=30 Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.582189 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.702438 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqgnp\" (UniqueName: \"kubernetes.io/projected/32c452d6-4b6b-48a6-918d-0dcf947f2a22-kube-api-access-jqgnp\") pod \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.702525 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-client-ca\") pod \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.702618 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-config\") pod \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.702695 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-proxy-ca-bundles\") pod \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.702766 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32c452d6-4b6b-48a6-918d-0dcf947f2a22-serving-cert\") pod \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\" (UID: \"32c452d6-4b6b-48a6-918d-0dcf947f2a22\") " Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.703462 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "32c452d6-4b6b-48a6-918d-0dcf947f2a22" (UID: "32c452d6-4b6b-48a6-918d-0dcf947f2a22"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.703544 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-client-ca" (OuterVolumeSpecName: "client-ca") pod "32c452d6-4b6b-48a6-918d-0dcf947f2a22" (UID: "32c452d6-4b6b-48a6-918d-0dcf947f2a22"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.703768 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-config" (OuterVolumeSpecName: "config") pod "32c452d6-4b6b-48a6-918d-0dcf947f2a22" (UID: "32c452d6-4b6b-48a6-918d-0dcf947f2a22"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.710085 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c452d6-4b6b-48a6-918d-0dcf947f2a22-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "32c452d6-4b6b-48a6-918d-0dcf947f2a22" (UID: "32c452d6-4b6b-48a6-918d-0dcf947f2a22"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.710964 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32c452d6-4b6b-48a6-918d-0dcf947f2a22-kube-api-access-jqgnp" (OuterVolumeSpecName: "kube-api-access-jqgnp") pod "32c452d6-4b6b-48a6-918d-0dcf947f2a22" (UID: "32c452d6-4b6b-48a6-918d-0dcf947f2a22"). InnerVolumeSpecName "kube-api-access-jqgnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.804029 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.804083 4935 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.804104 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32c452d6-4b6b-48a6-918d-0dcf947f2a22-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.804118 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqgnp\" (UniqueName: \"kubernetes.io/projected/32c452d6-4b6b-48a6-918d-0dcf947f2a22-kube-api-access-jqgnp\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:25 crc kubenswrapper[4935]: I1201 18:35:25.804132 4935 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/32c452d6-4b6b-48a6-918d-0dcf947f2a22-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.065738 4935 generic.go:334] "Generic (PLEG): container finished" podID="32c452d6-4b6b-48a6-918d-0dcf947f2a22" containerID="46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353" exitCode=0 Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.065800 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" event={"ID":"32c452d6-4b6b-48a6-918d-0dcf947f2a22","Type":"ContainerDied","Data":"46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353"} Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.065852 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.065905 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq" event={"ID":"32c452d6-4b6b-48a6-918d-0dcf947f2a22","Type":"ContainerDied","Data":"56f16eb24be742889ab3002fa59d80871adadca1c191bd93cfc30843b18704c7"} Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.065933 4935 scope.go:117] "RemoveContainer" containerID="46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.095730 4935 scope.go:117] "RemoveContainer" containerID="46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353" Dec 01 18:35:26 crc kubenswrapper[4935]: E1201 18:35:26.096627 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353\": container with ID starting with 46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353 not found: ID does not exist" containerID="46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.096696 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353"} err="failed to get container status \"46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353\": rpc error: code = NotFound desc = could not find container \"46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353\": container with ID starting with 46ffed861fff0ec08c2c82f0acbb1abd781b03fb00cbf811bbadfa7cdcfc2353 not found: ID does not exist" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.104092 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq"] Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.110656 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7dbb8cdfc9-2htkq"] Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.521884 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32c452d6-4b6b-48a6-918d-0dcf947f2a22" path="/var/lib/kubelet/pods/32c452d6-4b6b-48a6-918d-0dcf947f2a22/volumes" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.674384 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-85794df44c-msg59"] Dec 01 18:35:26 crc kubenswrapper[4935]: E1201 18:35:26.674757 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32c452d6-4b6b-48a6-918d-0dcf947f2a22" containerName="controller-manager" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.674779 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="32c452d6-4b6b-48a6-918d-0dcf947f2a22" containerName="controller-manager" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.674957 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="32c452d6-4b6b-48a6-918d-0dcf947f2a22" containerName="controller-manager" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.675850 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.680320 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.680967 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.681265 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.681576 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.681832 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.683303 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.697097 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.704637 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-85794df44c-msg59"] Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.820601 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-config\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.820709 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-proxy-ca-bundles\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.820747 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-client-ca\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.820784 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpwbl\" (UniqueName: \"kubernetes.io/projected/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-kube-api-access-kpwbl\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.820829 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-serving-cert\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.922375 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-config\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.922574 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-proxy-ca-bundles\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.922627 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-client-ca\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.922665 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpwbl\" (UniqueName: \"kubernetes.io/projected/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-kube-api-access-kpwbl\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.922723 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-serving-cert\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.924376 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-client-ca\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.926296 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-proxy-ca-bundles\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.927767 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-config\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " 
pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.930675 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-serving-cert\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:26 crc kubenswrapper[4935]: I1201 18:35:26.959835 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpwbl\" (UniqueName: \"kubernetes.io/projected/add2e3e6-aac2-4aa8-8ee6-c5160a73fec0-kube-api-access-kpwbl\") pod \"controller-manager-85794df44c-msg59\" (UID: \"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0\") " pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:27 crc kubenswrapper[4935]: I1201 18:35:27.009763 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:27 crc kubenswrapper[4935]: I1201 18:35:27.267744 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-85794df44c-msg59"] Dec 01 18:35:28 crc kubenswrapper[4935]: I1201 18:35:28.087887 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85794df44c-msg59" event={"ID":"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0","Type":"ContainerStarted","Data":"98a52cafa4668fb8a067cc0d365edc17d2b8188a57a2aee5f74747660917e48f"} Dec 01 18:35:28 crc kubenswrapper[4935]: I1201 18:35:28.088446 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85794df44c-msg59" event={"ID":"add2e3e6-aac2-4aa8-8ee6-c5160a73fec0","Type":"ContainerStarted","Data":"df2f6d819c243d1f8ad892fb05f4fd51162640040147e678e443fe663c1dcaea"} Dec 01 18:35:28 crc kubenswrapper[4935]: I1201 18:35:28.088511 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:28 crc kubenswrapper[4935]: I1201 18:35:28.094399 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-85794df44c-msg59" Dec 01 18:35:28 crc kubenswrapper[4935]: I1201 18:35:28.106057 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-85794df44c-msg59" podStartSLOduration=4.106032271 podStartE2EDuration="4.106032271s" podCreationTimestamp="2025-12-01 18:35:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:35:28.105633418 +0000 UTC m=+342.127262717" watchObservedRunningTime="2025-12-01 18:35:28.106032271 +0000 UTC m=+342.127661530" Dec 01 18:35:30 crc kubenswrapper[4935]: I1201 18:35:30.336839 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-64qpm" Dec 01 18:35:30 crc kubenswrapper[4935]: I1201 18:35:30.460513 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89k72"] Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.643260 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zpnpj"] Dec 01 
18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.644200 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zpnpj" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerName="registry-server" containerID="cri-o://eb5b1a9759a2ca834161f020c773fb4a6a62bec4451fedbdc489470927250dcc" gracePeriod=30 Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.658609 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gkmsh"] Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.658884 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gkmsh" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" containerName="registry-server" containerID="cri-o://70a9b3ee46e1c2d71b49774c53fa5584addba00784142d22d9ff8d7c9f3cff8d" gracePeriod=30 Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.662820 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k4g4f"] Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.663069 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" podUID="b123b9f1-7d6b-496c-87c2-7790b027abd6" containerName="marketplace-operator" containerID="cri-o://97c93b96ebfda3c3d2c58a31109963e7a3b8bb0c85463d688df59d5b09bd54e8" gracePeriod=30 Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.679999 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rtvzn"] Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.680316 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rtvzn" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="registry-server" containerID="cri-o://875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96" gracePeriod=30 Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.686901 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-67krg"] Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.687178 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-67krg" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerName="registry-server" containerID="cri-o://c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984" gracePeriod=30 Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.708906 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4fmtp"] Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.710314 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.718745 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4fmtp"] Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.794998 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/76d4e37a-f26a-4bb0-bbaf-91be51709278-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4fmtp\" (UID: \"76d4e37a-f26a-4bb0-bbaf-91be51709278\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.795039 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/76d4e37a-f26a-4bb0-bbaf-91be51709278-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4fmtp\" (UID: \"76d4e37a-f26a-4bb0-bbaf-91be51709278\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.795085 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ztzp\" (UniqueName: \"kubernetes.io/projected/76d4e37a-f26a-4bb0-bbaf-91be51709278-kube-api-access-9ztzp\") pod \"marketplace-operator-79b997595-4fmtp\" (UID: \"76d4e37a-f26a-4bb0-bbaf-91be51709278\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.897368 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ztzp\" (UniqueName: \"kubernetes.io/projected/76d4e37a-f26a-4bb0-bbaf-91be51709278-kube-api-access-9ztzp\") pod \"marketplace-operator-79b997595-4fmtp\" (UID: \"76d4e37a-f26a-4bb0-bbaf-91be51709278\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.897721 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/76d4e37a-f26a-4bb0-bbaf-91be51709278-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4fmtp\" (UID: \"76d4e37a-f26a-4bb0-bbaf-91be51709278\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.897762 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/76d4e37a-f26a-4bb0-bbaf-91be51709278-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4fmtp\" (UID: \"76d4e37a-f26a-4bb0-bbaf-91be51709278\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.899493 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/76d4e37a-f26a-4bb0-bbaf-91be51709278-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4fmtp\" (UID: \"76d4e37a-f26a-4bb0-bbaf-91be51709278\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.908391 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/76d4e37a-f26a-4bb0-bbaf-91be51709278-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4fmtp\" (UID: \"76d4e37a-f26a-4bb0-bbaf-91be51709278\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:53 crc kubenswrapper[4935]: I1201 18:35:53.922233 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ztzp\" (UniqueName: \"kubernetes.io/projected/76d4e37a-f26a-4bb0-bbaf-91be51709278-kube-api-access-9ztzp\") pod \"marketplace-operator-79b997595-4fmtp\" (UID: \"76d4e37a-f26a-4bb0-bbaf-91be51709278\") " pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.171971 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:54 crc kubenswrapper[4935]: E1201 18:35:54.209115 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96 is running failed: container process not found" containerID="875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96" cmd=["grpc_health_probe","-addr=:50051"] Dec 01 18:35:54 crc kubenswrapper[4935]: E1201 18:35:54.209574 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96 is running failed: container process not found" containerID="875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96" cmd=["grpc_health_probe","-addr=:50051"] Dec 01 18:35:54 crc kubenswrapper[4935]: E1201 18:35:54.209905 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96 is running failed: container process not found" containerID="875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96" cmd=["grpc_health_probe","-addr=:50051"] Dec 01 18:35:54 crc kubenswrapper[4935]: E1201 18:35:54.209939 4935 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-rtvzn" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="registry-server" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.267979 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.302164 4935 generic.go:334] "Generic (PLEG): container finished" podID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerID="c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984" exitCode=0 Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.302272 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67krg" event={"ID":"91c00d11-75b5-492f-8a4d-74e87a6aa2fe","Type":"ContainerDied","Data":"c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984"} Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.302319 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67krg" event={"ID":"91c00d11-75b5-492f-8a4d-74e87a6aa2fe","Type":"ContainerDied","Data":"5951c873011cc706c15431968bbc6989a07227b96ef3b9063070a213e3cdb913"} Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.302347 4935 scope.go:117] "RemoveContainer" containerID="c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.302549 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-67krg" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.322594 4935 generic.go:334] "Generic (PLEG): container finished" podID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerID="875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96" exitCode=0 Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.322689 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtvzn" event={"ID":"585dfe75-4262-4b8b-9874-25e51b01cafd","Type":"ContainerDied","Data":"875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96"} Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.338436 4935 generic.go:334] "Generic (PLEG): container finished" podID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerID="eb5b1a9759a2ca834161f020c773fb4a6a62bec4451fedbdc489470927250dcc" exitCode=0 Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.338584 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpnpj" event={"ID":"0efae526-1f2b-44b5-b69e-64af2f426aa8","Type":"ContainerDied","Data":"eb5b1a9759a2ca834161f020c773fb4a6a62bec4451fedbdc489470927250dcc"} Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.342615 4935 generic.go:334] "Generic (PLEG): container finished" podID="b123b9f1-7d6b-496c-87c2-7790b027abd6" containerID="97c93b96ebfda3c3d2c58a31109963e7a3b8bb0c85463d688df59d5b09bd54e8" exitCode=0 Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.344136 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" event={"ID":"b123b9f1-7d6b-496c-87c2-7790b027abd6","Type":"ContainerDied","Data":"97c93b96ebfda3c3d2c58a31109963e7a3b8bb0c85463d688df59d5b09bd54e8"} Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.350595 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.350649 4935 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.353189 4935 generic.go:334] "Generic (PLEG): container finished" podID="66550924-3006-4d90-b516-ac5ea6155bbc" containerID="70a9b3ee46e1c2d71b49774c53fa5584addba00784142d22d9ff8d7c9f3cff8d" exitCode=0 Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.353233 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gkmsh" event={"ID":"66550924-3006-4d90-b516-ac5ea6155bbc","Type":"ContainerDied","Data":"70a9b3ee46e1c2d71b49774c53fa5584addba00784142d22d9ff8d7c9f3cff8d"} Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.369019 4935 scope.go:117] "RemoveContainer" containerID="9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.413651 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pf2x\" (UniqueName: \"kubernetes.io/projected/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-kube-api-access-5pf2x\") pod \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.413998 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-catalog-content\") pod \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.414067 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-utilities\") pod \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\" (UID: \"91c00d11-75b5-492f-8a4d-74e87a6aa2fe\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.414938 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-utilities" (OuterVolumeSpecName: "utilities") pod "91c00d11-75b5-492f-8a4d-74e87a6aa2fe" (UID: "91c00d11-75b5-492f-8a4d-74e87a6aa2fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.418557 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-kube-api-access-5pf2x" (OuterVolumeSpecName: "kube-api-access-5pf2x") pod "91c00d11-75b5-492f-8a4d-74e87a6aa2fe" (UID: "91c00d11-75b5-492f-8a4d-74e87a6aa2fe"). InnerVolumeSpecName "kube-api-access-5pf2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.449791 4935 scope.go:117] "RemoveContainer" containerID="5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.470423 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.470739 4935 scope.go:117] "RemoveContainer" containerID="c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984" Dec 01 18:35:54 crc kubenswrapper[4935]: E1201 18:35:54.471212 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984\": container with ID starting with c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984 not found: ID does not exist" containerID="c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.471281 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984"} err="failed to get container status \"c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984\": rpc error: code = NotFound desc = could not find container \"c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984\": container with ID starting with c8a5232e055bf97a008a501873130f3f70293cbf8f4a9d5cb202527b62494984 not found: ID does not exist" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.471323 4935 scope.go:117] "RemoveContainer" containerID="9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7" Dec 01 18:35:54 crc kubenswrapper[4935]: E1201 18:35:54.471815 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7\": container with ID starting with 9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7 not found: ID does not exist" containerID="9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.471857 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7"} err="failed to get container status \"9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7\": rpc error: code = NotFound desc = could not find container \"9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7\": container with ID starting with 9f42d8e7bf84f170089d0de6dd66d05f72d5cbfebf19963b7181a269005483e7 not found: ID does not exist" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.471897 4935 scope.go:117] "RemoveContainer" containerID="5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24" Dec 01 18:35:54 crc kubenswrapper[4935]: E1201 18:35:54.472755 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24\": container with ID starting with 5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24 not found: ID does not exist" containerID="5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.472793 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24"} err="failed to get container status \"5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24\": rpc error: code = 
NotFound desc = could not find container \"5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24\": container with ID starting with 5768c20ff6a0157bebf54df7468c52792b5f89093497c51874dc5ed9e3adbf24 not found: ID does not exist" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.473121 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.478998 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.483075 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.516991 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pf2x\" (UniqueName: \"kubernetes.io/projected/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-kube-api-access-5pf2x\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.517023 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.591770 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "91c00d11-75b5-492f-8a4d-74e87a6aa2fe" (UID: "91c00d11-75b5-492f-8a4d-74e87a6aa2fe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618510 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-operator-metrics\") pod \"b123b9f1-7d6b-496c-87c2-7790b027abd6\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618566 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-utilities\") pod \"0efae526-1f2b-44b5-b69e-64af2f426aa8\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618596 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-catalog-content\") pod \"585dfe75-4262-4b8b-9874-25e51b01cafd\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618640 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86vfr\" (UniqueName: \"kubernetes.io/projected/b123b9f1-7d6b-496c-87c2-7790b027abd6-kube-api-access-86vfr\") pod \"b123b9f1-7d6b-496c-87c2-7790b027abd6\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618670 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rmnk\" (UniqueName: \"kubernetes.io/projected/66550924-3006-4d90-b516-ac5ea6155bbc-kube-api-access-2rmnk\") pod 
\"66550924-3006-4d90-b516-ac5ea6155bbc\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618720 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-trusted-ca\") pod \"b123b9f1-7d6b-496c-87c2-7790b027abd6\" (UID: \"b123b9f1-7d6b-496c-87c2-7790b027abd6\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618736 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-utilities\") pod \"66550924-3006-4d90-b516-ac5ea6155bbc\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618765 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-catalog-content\") pod \"66550924-3006-4d90-b516-ac5ea6155bbc\" (UID: \"66550924-3006-4d90-b516-ac5ea6155bbc\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618793 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxqjg\" (UniqueName: \"kubernetes.io/projected/585dfe75-4262-4b8b-9874-25e51b01cafd-kube-api-access-cxqjg\") pod \"585dfe75-4262-4b8b-9874-25e51b01cafd\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618813 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-catalog-content\") pod \"0efae526-1f2b-44b5-b69e-64af2f426aa8\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618849 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sfcrp\" (UniqueName: \"kubernetes.io/projected/0efae526-1f2b-44b5-b69e-64af2f426aa8-kube-api-access-sfcrp\") pod \"0efae526-1f2b-44b5-b69e-64af2f426aa8\" (UID: \"0efae526-1f2b-44b5-b69e-64af2f426aa8\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.618866 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-utilities\") pod \"585dfe75-4262-4b8b-9874-25e51b01cafd\" (UID: \"585dfe75-4262-4b8b-9874-25e51b01cafd\") " Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.619097 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c00d11-75b5-492f-8a4d-74e87a6aa2fe-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.619786 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-utilities" (OuterVolumeSpecName: "utilities") pod "585dfe75-4262-4b8b-9874-25e51b01cafd" (UID: "585dfe75-4262-4b8b-9874-25e51b01cafd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.620110 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-utilities" (OuterVolumeSpecName: "utilities") pod "0efae526-1f2b-44b5-b69e-64af2f426aa8" (UID: "0efae526-1f2b-44b5-b69e-64af2f426aa8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.620620 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-utilities" (OuterVolumeSpecName: "utilities") pod "66550924-3006-4d90-b516-ac5ea6155bbc" (UID: "66550924-3006-4d90-b516-ac5ea6155bbc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.620868 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b123b9f1-7d6b-496c-87c2-7790b027abd6" (UID: "b123b9f1-7d6b-496c-87c2-7790b027abd6"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.621898 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66550924-3006-4d90-b516-ac5ea6155bbc-kube-api-access-2rmnk" (OuterVolumeSpecName: "kube-api-access-2rmnk") pod "66550924-3006-4d90-b516-ac5ea6155bbc" (UID: "66550924-3006-4d90-b516-ac5ea6155bbc"). InnerVolumeSpecName "kube-api-access-2rmnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.626234 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b123b9f1-7d6b-496c-87c2-7790b027abd6" (UID: "b123b9f1-7d6b-496c-87c2-7790b027abd6"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.628113 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/585dfe75-4262-4b8b-9874-25e51b01cafd-kube-api-access-cxqjg" (OuterVolumeSpecName: "kube-api-access-cxqjg") pod "585dfe75-4262-4b8b-9874-25e51b01cafd" (UID: "585dfe75-4262-4b8b-9874-25e51b01cafd"). InnerVolumeSpecName "kube-api-access-cxqjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.630502 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-67krg"] Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.634791 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0efae526-1f2b-44b5-b69e-64af2f426aa8-kube-api-access-sfcrp" (OuterVolumeSpecName: "kube-api-access-sfcrp") pod "0efae526-1f2b-44b5-b69e-64af2f426aa8" (UID: "0efae526-1f2b-44b5-b69e-64af2f426aa8"). InnerVolumeSpecName "kube-api-access-sfcrp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.635383 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b123b9f1-7d6b-496c-87c2-7790b027abd6-kube-api-access-86vfr" (OuterVolumeSpecName: "kube-api-access-86vfr") pod "b123b9f1-7d6b-496c-87c2-7790b027abd6" (UID: "b123b9f1-7d6b-496c-87c2-7790b027abd6"). InnerVolumeSpecName "kube-api-access-86vfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.641504 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-67krg"] Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.642733 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "585dfe75-4262-4b8b-9874-25e51b01cafd" (UID: "585dfe75-4262-4b8b-9874-25e51b01cafd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.700405 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4fmtp"] Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.720350 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0efae526-1f2b-44b5-b69e-64af2f426aa8" (UID: "0efae526-1f2b-44b5-b69e-64af2f426aa8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721073 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86vfr\" (UniqueName: \"kubernetes.io/projected/b123b9f1-7d6b-496c-87c2-7790b027abd6-kube-api-access-86vfr\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721128 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rmnk\" (UniqueName: \"kubernetes.io/projected/66550924-3006-4d90-b516-ac5ea6155bbc-kube-api-access-2rmnk\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721157 4935 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721170 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721183 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxqjg\" (UniqueName: \"kubernetes.io/projected/585dfe75-4262-4b8b-9874-25e51b01cafd-kube-api-access-cxqjg\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721193 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721202 4935 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-sfcrp\" (UniqueName: \"kubernetes.io/projected/0efae526-1f2b-44b5-b69e-64af2f426aa8-kube-api-access-sfcrp\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721212 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721220 4935 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b123b9f1-7d6b-496c-87c2-7790b027abd6-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721230 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0efae526-1f2b-44b5-b69e-64af2f426aa8-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.721239 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585dfe75-4262-4b8b-9874-25e51b01cafd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.725788 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66550924-3006-4d90-b516-ac5ea6155bbc" (UID: "66550924-3006-4d90-b516-ac5ea6155bbc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:35:54 crc kubenswrapper[4935]: I1201 18:35:54.822870 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66550924-3006-4d90-b516-ac5ea6155bbc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.282980 4935 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k4g4f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.283420 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" podUID="b123b9f1-7d6b-496c-87c2-7790b027abd6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.361463 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gkmsh" event={"ID":"66550924-3006-4d90-b516-ac5ea6155bbc","Type":"ContainerDied","Data":"2194d6e3df49685218f8d8d68861998f11d9a1e45de8634c3c04b6e4dcbe839f"} Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.361900 4935 scope.go:117] "RemoveContainer" containerID="70a9b3ee46e1c2d71b49774c53fa5584addba00784142d22d9ff8d7c9f3cff8d" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.361545 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gkmsh" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.362786 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" event={"ID":"76d4e37a-f26a-4bb0-bbaf-91be51709278","Type":"ContainerStarted","Data":"c0f466b4eee6d18cd7c0b55bd5b761b9f959621c53e5a8a1fcec4d134431ff27"} Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.362844 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" event={"ID":"76d4e37a-f26a-4bb0-bbaf-91be51709278","Type":"ContainerStarted","Data":"3a7f3b87df5845040843fdbac46ebaf98013ea261d30f36e3f682078f20d43eb"} Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.363101 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.366361 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtvzn" event={"ID":"585dfe75-4262-4b8b-9874-25e51b01cafd","Type":"ContainerDied","Data":"9edfa0003dca4c0480958c4d42c12034035391df0d90e9df8cc6288f2ae6bc05"} Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.366411 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rtvzn" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.368918 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.371177 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpnpj" event={"ID":"0efae526-1f2b-44b5-b69e-64af2f426aa8","Type":"ContainerDied","Data":"d8433b439ce6c27ff74def9ea4ef1127281f253ec3cec4c9bf625e2a7e0ff87e"} Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.371207 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zpnpj" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.373625 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" event={"ID":"b123b9f1-7d6b-496c-87c2-7790b027abd6","Type":"ContainerDied","Data":"eb943c88be27933d28775702f2dca11493ed59035987cd433373722cde2152b3"} Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.373796 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k4g4f" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.381704 4935 scope.go:117] "RemoveContainer" containerID="4e5b1a9ae17aef3ee3175d13dd1ed544ca3cd2119d36c393b420e43640409237" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.403119 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-4fmtp" podStartSLOduration=2.403090441 podStartE2EDuration="2.403090441s" podCreationTimestamp="2025-12-01 18:35:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:35:55.393491878 +0000 UTC m=+369.415121137" watchObservedRunningTime="2025-12-01 18:35:55.403090441 +0000 UTC m=+369.424719710" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.433245 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gkmsh"] Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.440557 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gkmsh"] Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.440803 4935 scope.go:117] "RemoveContainer" containerID="5a5da0878716880a651408d57c3d1d135a50c2e87be893bcc038dc2790602dcb" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.451767 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rtvzn"] Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.457305 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rtvzn"] Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.462242 4935 scope.go:117] "RemoveContainer" containerID="875792e43fb296094376806a73a83a2027f50819390fe61d061a348ee9c6ec96" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.473163 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zpnpj"] Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.481549 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zpnpj"] Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.487787 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k4g4f"] Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.488669 4935 scope.go:117] "RemoveContainer" containerID="9a6d55e8887feb1956d1914b00093b6344b49f99f67ecd0822826635bf913464" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.492055 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k4g4f"] Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.508403 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" podUID="4f81095d-3084-427f-8f0e-bdd180180c31" containerName="registry" containerID="cri-o://758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277" gracePeriod=30 Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.515000 4935 scope.go:117] "RemoveContainer" containerID="3fb7254d543be8979761acc8f84edc07d265e98ba396fe94fdd8ddf1eb2b8ab4" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.535019 4935 scope.go:117] "RemoveContainer" containerID="eb5b1a9759a2ca834161f020c773fb4a6a62bec4451fedbdc489470927250dcc" Dec 01 18:35:55 crc 
kubenswrapper[4935]: I1201 18:35:55.610313 4935 scope.go:117] "RemoveContainer" containerID="0d11602c8e73c85c5beda213c52e1daacfcc3bdcc9dff6c0420e20925dda6664" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.630539 4935 scope.go:117] "RemoveContainer" containerID="267b437c3bb6d2605400e1fd365ca449566fd021beb23765d7ed8a919643909f" Dec 01 18:35:55 crc kubenswrapper[4935]: I1201 18:35:55.649020 4935 scope.go:117] "RemoveContainer" containerID="97c93b96ebfda3c3d2c58a31109963e7a3b8bb0c85463d688df59d5b09bd54e8" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.003191 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062298 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m2wlg"] Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062580 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="extract-content" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062596 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="extract-content" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062607 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="extract-utilities" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062633 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="extract-utilities" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062641 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerName="extract-utilities" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062648 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerName="extract-utilities" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062660 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b123b9f1-7d6b-496c-87c2-7790b027abd6" containerName="marketplace-operator" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062666 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b123b9f1-7d6b-496c-87c2-7790b027abd6" containerName="marketplace-operator" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062678 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerName="extract-utilities" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062683 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerName="extract-utilities" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062716 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062724 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062734 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" containerName="extract-utilities" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 
18:35:56.062742 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" containerName="extract-utilities" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062751 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062757 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062769 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerName="extract-content" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062794 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerName="extract-content" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062802 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" containerName="extract-content" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062808 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" containerName="extract-content" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062814 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f81095d-3084-427f-8f0e-bdd180180c31" containerName="registry" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062823 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f81095d-3084-427f-8f0e-bdd180180c31" containerName="registry" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062832 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062838 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062845 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062870 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.062879 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerName="extract-content" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062885 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerName="extract-content" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.062989 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.063002 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.063030 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" containerName="registry-server" Dec 01 18:35:56 crc 
kubenswrapper[4935]: I1201 18:35:56.063039 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f81095d-3084-427f-8f0e-bdd180180c31" containerName="registry" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.063047 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="b123b9f1-7d6b-496c-87c2-7790b027abd6" containerName="marketplace-operator" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.063054 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" containerName="registry-server" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.064132 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.071768 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.077072 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m2wlg"] Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.142819 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"4f81095d-3084-427f-8f0e-bdd180180c31\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143249 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-registry-certificates\") pod \"4f81095d-3084-427f-8f0e-bdd180180c31\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143285 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-bound-sa-token\") pod \"4f81095d-3084-427f-8f0e-bdd180180c31\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143393 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-registry-tls\") pod \"4f81095d-3084-427f-8f0e-bdd180180c31\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143427 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4f81095d-3084-427f-8f0e-bdd180180c31-ca-trust-extracted\") pod \"4f81095d-3084-427f-8f0e-bdd180180c31\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143459 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdhx6\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-kube-api-access-qdhx6\") pod \"4f81095d-3084-427f-8f0e-bdd180180c31\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143485 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-trusted-ca\") pod \"4f81095d-3084-427f-8f0e-bdd180180c31\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143519 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4f81095d-3084-427f-8f0e-bdd180180c31-installation-pull-secrets\") pod \"4f81095d-3084-427f-8f0e-bdd180180c31\" (UID: \"4f81095d-3084-427f-8f0e-bdd180180c31\") " Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143765 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctpwp\" (UniqueName: \"kubernetes.io/projected/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-kube-api-access-ctpwp\") pod \"redhat-operators-m2wlg\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143806 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-utilities\") pod \"redhat-operators-m2wlg\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.143861 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-catalog-content\") pod \"redhat-operators-m2wlg\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.144429 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "4f81095d-3084-427f-8f0e-bdd180180c31" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.145063 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "4f81095d-3084-427f-8f0e-bdd180180c31" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.153243 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "4f81095d-3084-427f-8f0e-bdd180180c31" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.153376 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f81095d-3084-427f-8f0e-bdd180180c31-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "4f81095d-3084-427f-8f0e-bdd180180c31" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.155230 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-kube-api-access-qdhx6" (OuterVolumeSpecName: "kube-api-access-qdhx6") pod "4f81095d-3084-427f-8f0e-bdd180180c31" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31"). InnerVolumeSpecName "kube-api-access-qdhx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.162098 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "4f81095d-3084-427f-8f0e-bdd180180c31" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.162559 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "4f81095d-3084-427f-8f0e-bdd180180c31" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.163890 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f81095d-3084-427f-8f0e-bdd180180c31-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "4f81095d-3084-427f-8f0e-bdd180180c31" (UID: "4f81095d-3084-427f-8f0e-bdd180180c31"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.245840 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctpwp\" (UniqueName: \"kubernetes.io/projected/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-kube-api-access-ctpwp\") pod \"redhat-operators-m2wlg\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.245926 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-utilities\") pod \"redhat-operators-m2wlg\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.245966 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-catalog-content\") pod \"redhat-operators-m2wlg\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.246046 4935 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.246058 4935 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.246068 4935 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.246078 4935 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4f81095d-3084-427f-8f0e-bdd180180c31-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.246112 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdhx6\" (UniqueName: \"kubernetes.io/projected/4f81095d-3084-427f-8f0e-bdd180180c31-kube-api-access-qdhx6\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.246124 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f81095d-3084-427f-8f0e-bdd180180c31-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.246134 4935 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4f81095d-3084-427f-8f0e-bdd180180c31-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.246693 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-utilities\") pod \"redhat-operators-m2wlg\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc 
kubenswrapper[4935]: I1201 18:35:56.246780 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-catalog-content\") pod \"redhat-operators-m2wlg\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.264217 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctpwp\" (UniqueName: \"kubernetes.io/projected/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-kube-api-access-ctpwp\") pod \"redhat-operators-m2wlg\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.381092 4935 generic.go:334] "Generic (PLEG): container finished" podID="4f81095d-3084-427f-8f0e-bdd180180c31" containerID="758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277" exitCode=0 Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.381163 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.381187 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" event={"ID":"4f81095d-3084-427f-8f0e-bdd180180c31","Type":"ContainerDied","Data":"758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277"} Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.381217 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-89k72" event={"ID":"4f81095d-3084-427f-8f0e-bdd180180c31","Type":"ContainerDied","Data":"26004290f015a1ed5f5dc6aeaff71951ccb4753bb0c2bc9127436eac375148b5"} Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.381234 4935 scope.go:117] "RemoveContainer" containerID="758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.387584 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.409385 4935 scope.go:117] "RemoveContainer" containerID="758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277" Dec 01 18:35:56 crc kubenswrapper[4935]: E1201 18:35:56.410277 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277\": container with ID starting with 758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277 not found: ID does not exist" containerID="758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.410343 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277"} err="failed to get container status \"758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277\": rpc error: code = NotFound desc = could not find container \"758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277\": container with ID starting with 758e0c26883cd13f794fc41d860114619e79bfce697305bba16d165e631a5277 not found: ID does not exist" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.415528 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89k72"] Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.427723 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89k72"] Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.516704 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0efae526-1f2b-44b5-b69e-64af2f426aa8" path="/var/lib/kubelet/pods/0efae526-1f2b-44b5-b69e-64af2f426aa8/volumes" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.518199 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f81095d-3084-427f-8f0e-bdd180180c31" path="/var/lib/kubelet/pods/4f81095d-3084-427f-8f0e-bdd180180c31/volumes" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.518855 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="585dfe75-4262-4b8b-9874-25e51b01cafd" path="/var/lib/kubelet/pods/585dfe75-4262-4b8b-9874-25e51b01cafd/volumes" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.521237 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66550924-3006-4d90-b516-ac5ea6155bbc" path="/var/lib/kubelet/pods/66550924-3006-4d90-b516-ac5ea6155bbc/volumes" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.522049 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91c00d11-75b5-492f-8a4d-74e87a6aa2fe" path="/var/lib/kubelet/pods/91c00d11-75b5-492f-8a4d-74e87a6aa2fe/volumes" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.523463 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b123b9f1-7d6b-496c-87c2-7790b027abd6" path="/var/lib/kubelet/pods/b123b9f1-7d6b-496c-87c2-7790b027abd6/volumes" Dec 01 18:35:56 crc kubenswrapper[4935]: I1201 18:35:56.797224 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m2wlg"] Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.394060 4935 generic.go:334] "Generic (PLEG): container finished" podID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" 
containerID="1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1" exitCode=0 Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.394183 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2wlg" event={"ID":"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0","Type":"ContainerDied","Data":"1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1"} Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.394287 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2wlg" event={"ID":"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0","Type":"ContainerStarted","Data":"75b050cc7291978bcd68ed84c5a0887c50b22b06f49c2130d9159c3543193b09"} Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.866290 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jd8nr"] Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.869625 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.871867 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.876922 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jd8nr"] Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.978585 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lqlv\" (UniqueName: \"kubernetes.io/projected/efec9635-2457-41e3-8477-b6f6081dc30f-kube-api-access-8lqlv\") pod \"certified-operators-jd8nr\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.978933 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-catalog-content\") pod \"certified-operators-jd8nr\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:57 crc kubenswrapper[4935]: I1201 18:35:57.979071 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-utilities\") pod \"certified-operators-jd8nr\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.080966 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-catalog-content\") pod \"certified-operators-jd8nr\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.081124 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-utilities\") pod \"certified-operators-jd8nr\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.081190 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lqlv\" (UniqueName: \"kubernetes.io/projected/efec9635-2457-41e3-8477-b6f6081dc30f-kube-api-access-8lqlv\") pod \"certified-operators-jd8nr\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.081866 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-catalog-content\") pod \"certified-operators-jd8nr\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.081935 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-utilities\") pod \"certified-operators-jd8nr\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.117786 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lqlv\" (UniqueName: \"kubernetes.io/projected/efec9635-2457-41e3-8477-b6f6081dc30f-kube-api-access-8lqlv\") pod \"certified-operators-jd8nr\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.193977 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.472548 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mx79s"] Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.482710 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mx79s"] Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.482853 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.487232 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.588620 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7824b0e-01a6-40fd-a645-510e6e4bb088-utilities\") pod \"community-operators-mx79s\" (UID: \"f7824b0e-01a6-40fd-a645-510e6e4bb088\") " pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.588674 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7824b0e-01a6-40fd-a645-510e6e4bb088-catalog-content\") pod \"community-operators-mx79s\" (UID: \"f7824b0e-01a6-40fd-a645-510e6e4bb088\") " pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.588934 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl4rb\" (UniqueName: \"kubernetes.io/projected/f7824b0e-01a6-40fd-a645-510e6e4bb088-kube-api-access-xl4rb\") pod \"community-operators-mx79s\" (UID: \"f7824b0e-01a6-40fd-a645-510e6e4bb088\") " pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.691308 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl4rb\" (UniqueName: \"kubernetes.io/projected/f7824b0e-01a6-40fd-a645-510e6e4bb088-kube-api-access-xl4rb\") pod \"community-operators-mx79s\" (UID: \"f7824b0e-01a6-40fd-a645-510e6e4bb088\") " pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.691421 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7824b0e-01a6-40fd-a645-510e6e4bb088-utilities\") pod \"community-operators-mx79s\" (UID: \"f7824b0e-01a6-40fd-a645-510e6e4bb088\") " pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.691456 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7824b0e-01a6-40fd-a645-510e6e4bb088-catalog-content\") pod \"community-operators-mx79s\" (UID: \"f7824b0e-01a6-40fd-a645-510e6e4bb088\") " pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.692134 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7824b0e-01a6-40fd-a645-510e6e4bb088-utilities\") pod \"community-operators-mx79s\" (UID: \"f7824b0e-01a6-40fd-a645-510e6e4bb088\") " pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.692230 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7824b0e-01a6-40fd-a645-510e6e4bb088-catalog-content\") pod \"community-operators-mx79s\" (UID: \"f7824b0e-01a6-40fd-a645-510e6e4bb088\") " pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.695921 4935 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jd8nr"] Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.719527 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl4rb\" (UniqueName: \"kubernetes.io/projected/f7824b0e-01a6-40fd-a645-510e6e4bb088-kube-api-access-xl4rb\") pod \"community-operators-mx79s\" (UID: \"f7824b0e-01a6-40fd-a645-510e6e4bb088\") " pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:58 crc kubenswrapper[4935]: I1201 18:35:58.817997 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:35:59 crc kubenswrapper[4935]: I1201 18:35:59.248352 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mx79s"] Dec 01 18:35:59 crc kubenswrapper[4935]: W1201 18:35:59.255305 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7824b0e_01a6_40fd_a645_510e6e4bb088.slice/crio-0aa907111a9e4303731a5122a69abcd214fc963a94d759bf964a88dd95eb8f21 WatchSource:0}: Error finding container 0aa907111a9e4303731a5122a69abcd214fc963a94d759bf964a88dd95eb8f21: Status 404 returned error can't find the container with id 0aa907111a9e4303731a5122a69abcd214fc963a94d759bf964a88dd95eb8f21 Dec 01 18:35:59 crc kubenswrapper[4935]: I1201 18:35:59.413941 4935 generic.go:334] "Generic (PLEG): container finished" podID="efec9635-2457-41e3-8477-b6f6081dc30f" containerID="54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4" exitCode=0 Dec 01 18:35:59 crc kubenswrapper[4935]: I1201 18:35:59.414011 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd8nr" event={"ID":"efec9635-2457-41e3-8477-b6f6081dc30f","Type":"ContainerDied","Data":"54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4"} Dec 01 18:35:59 crc kubenswrapper[4935]: I1201 18:35:59.414319 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd8nr" event={"ID":"efec9635-2457-41e3-8477-b6f6081dc30f","Type":"ContainerStarted","Data":"43bed21d7055abce0a981bcbfcd26057454a8cfe8445460ed685c8b74d0b7a85"} Dec 01 18:35:59 crc kubenswrapper[4935]: I1201 18:35:59.416523 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2wlg" event={"ID":"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0","Type":"ContainerStarted","Data":"770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf"} Dec 01 18:35:59 crc kubenswrapper[4935]: I1201 18:35:59.417750 4935 generic.go:334] "Generic (PLEG): container finished" podID="f7824b0e-01a6-40fd-a645-510e6e4bb088" containerID="4f2d0b79b5e2526ce2294c3885fe6c5c4c76c02ea0232f5363cb97dfa64df95c" exitCode=0 Dec 01 18:35:59 crc kubenswrapper[4935]: I1201 18:35:59.417802 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx79s" event={"ID":"f7824b0e-01a6-40fd-a645-510e6e4bb088","Type":"ContainerDied","Data":"4f2d0b79b5e2526ce2294c3885fe6c5c4c76c02ea0232f5363cb97dfa64df95c"} Dec 01 18:35:59 crc kubenswrapper[4935]: I1201 18:35:59.417834 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx79s" event={"ID":"f7824b0e-01a6-40fd-a645-510e6e4bb088","Type":"ContainerStarted","Data":"0aa907111a9e4303731a5122a69abcd214fc963a94d759bf964a88dd95eb8f21"} Dec 01 18:36:00 crc 
kubenswrapper[4935]: I1201 18:36:00.272367 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l5vnc"] Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.280252 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.280810 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5vnc"] Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.283414 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.419176 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlljt\" (UniqueName: \"kubernetes.io/projected/e07502f2-97bd-468a-bd48-7d309cb9ee99-kube-api-access-tlljt\") pod \"redhat-marketplace-l5vnc\" (UID: \"e07502f2-97bd-468a-bd48-7d309cb9ee99\") " pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.419241 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07502f2-97bd-468a-bd48-7d309cb9ee99-utilities\") pod \"redhat-marketplace-l5vnc\" (UID: \"e07502f2-97bd-468a-bd48-7d309cb9ee99\") " pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.419298 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07502f2-97bd-468a-bd48-7d309cb9ee99-catalog-content\") pod \"redhat-marketplace-l5vnc\" (UID: \"e07502f2-97bd-468a-bd48-7d309cb9ee99\") " pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.424640 4935 generic.go:334] "Generic (PLEG): container finished" podID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerID="770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf" exitCode=0 Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.424674 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2wlg" event={"ID":"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0","Type":"ContainerDied","Data":"770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf"} Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.525883 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07502f2-97bd-468a-bd48-7d309cb9ee99-catalog-content\") pod \"redhat-marketplace-l5vnc\" (UID: \"e07502f2-97bd-468a-bd48-7d309cb9ee99\") " pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.526439 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07502f2-97bd-468a-bd48-7d309cb9ee99-catalog-content\") pod \"redhat-marketplace-l5vnc\" (UID: \"e07502f2-97bd-468a-bd48-7d309cb9ee99\") " pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.526991 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlljt\" (UniqueName: 
\"kubernetes.io/projected/e07502f2-97bd-468a-bd48-7d309cb9ee99-kube-api-access-tlljt\") pod \"redhat-marketplace-l5vnc\" (UID: \"e07502f2-97bd-468a-bd48-7d309cb9ee99\") " pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.527315 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07502f2-97bd-468a-bd48-7d309cb9ee99-utilities\") pod \"redhat-marketplace-l5vnc\" (UID: \"e07502f2-97bd-468a-bd48-7d309cb9ee99\") " pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.527651 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07502f2-97bd-468a-bd48-7d309cb9ee99-utilities\") pod \"redhat-marketplace-l5vnc\" (UID: \"e07502f2-97bd-468a-bd48-7d309cb9ee99\") " pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.543675 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlljt\" (UniqueName: \"kubernetes.io/projected/e07502f2-97bd-468a-bd48-7d309cb9ee99-kube-api-access-tlljt\") pod \"redhat-marketplace-l5vnc\" (UID: \"e07502f2-97bd-468a-bd48-7d309cb9ee99\") " pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: I1201 18:36:00.639942 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:00 crc kubenswrapper[4935]: E1201 18:36:00.964258 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podefec9635_2457_41e3_8477_b6f6081dc30f.slice/crio-365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podefec9635_2457_41e3_8477_b6f6081dc30f.slice/crio-conmon-365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:36:01 crc kubenswrapper[4935]: I1201 18:36:01.079347 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5vnc"] Dec 01 18:36:01 crc kubenswrapper[4935]: W1201 18:36:01.086419 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode07502f2_97bd_468a_bd48_7d309cb9ee99.slice/crio-3794d22ce3a0572e182d2fd318c0aeee21c9685b6999107f26ed903fd53c9f7d WatchSource:0}: Error finding container 3794d22ce3a0572e182d2fd318c0aeee21c9685b6999107f26ed903fd53c9f7d: Status 404 returned error can't find the container with id 3794d22ce3a0572e182d2fd318c0aeee21c9685b6999107f26ed903fd53c9f7d Dec 01 18:36:01 crc kubenswrapper[4935]: I1201 18:36:01.431302 4935 generic.go:334] "Generic (PLEG): container finished" podID="efec9635-2457-41e3-8477-b6f6081dc30f" containerID="365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b" exitCode=0 Dec 01 18:36:01 crc kubenswrapper[4935]: I1201 18:36:01.431378 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd8nr" event={"ID":"efec9635-2457-41e3-8477-b6f6081dc30f","Type":"ContainerDied","Data":"365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b"} Dec 01 18:36:01 crc 
kubenswrapper[4935]: I1201 18:36:01.434641 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2wlg" event={"ID":"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0","Type":"ContainerStarted","Data":"ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511"} Dec 01 18:36:01 crc kubenswrapper[4935]: I1201 18:36:01.442635 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx79s" event={"ID":"f7824b0e-01a6-40fd-a645-510e6e4bb088","Type":"ContainerStarted","Data":"773478452f597fdace3763024e380c6a8f6d4e1d681e4c4828ca502806c68169"} Dec 01 18:36:01 crc kubenswrapper[4935]: I1201 18:36:01.444001 4935 generic.go:334] "Generic (PLEG): container finished" podID="e07502f2-97bd-468a-bd48-7d309cb9ee99" containerID="adc6dc4c74d42cb2caf3a1a7d56ea044d755b47f0d614d802dd86c891f79c69d" exitCode=0 Dec 01 18:36:01 crc kubenswrapper[4935]: I1201 18:36:01.444030 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vnc" event={"ID":"e07502f2-97bd-468a-bd48-7d309cb9ee99","Type":"ContainerDied","Data":"adc6dc4c74d42cb2caf3a1a7d56ea044d755b47f0d614d802dd86c891f79c69d"} Dec 01 18:36:01 crc kubenswrapper[4935]: I1201 18:36:01.444046 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vnc" event={"ID":"e07502f2-97bd-468a-bd48-7d309cb9ee99","Type":"ContainerStarted","Data":"3794d22ce3a0572e182d2fd318c0aeee21c9685b6999107f26ed903fd53c9f7d"} Dec 01 18:36:01 crc kubenswrapper[4935]: I1201 18:36:01.472670 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m2wlg" podStartSLOduration=1.7449893840000001 podStartE2EDuration="5.472653471s" podCreationTimestamp="2025-12-01 18:35:56 +0000 UTC" firstStartedPulling="2025-12-01 18:35:57.398376702 +0000 UTC m=+371.420005971" lastFinishedPulling="2025-12-01 18:36:01.126040799 +0000 UTC m=+375.147670058" observedRunningTime="2025-12-01 18:36:01.469003225 +0000 UTC m=+375.490632484" watchObservedRunningTime="2025-12-01 18:36:01.472653471 +0000 UTC m=+375.494282730" Dec 01 18:36:02 crc kubenswrapper[4935]: I1201 18:36:02.457446 4935 generic.go:334] "Generic (PLEG): container finished" podID="f7824b0e-01a6-40fd-a645-510e6e4bb088" containerID="773478452f597fdace3763024e380c6a8f6d4e1d681e4c4828ca502806c68169" exitCode=0 Dec 01 18:36:02 crc kubenswrapper[4935]: I1201 18:36:02.457540 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx79s" event={"ID":"f7824b0e-01a6-40fd-a645-510e6e4bb088","Type":"ContainerDied","Data":"773478452f597fdace3763024e380c6a8f6d4e1d681e4c4828ca502806c68169"} Dec 01 18:36:03 crc kubenswrapper[4935]: I1201 18:36:03.474138 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx79s" event={"ID":"f7824b0e-01a6-40fd-a645-510e6e4bb088","Type":"ContainerStarted","Data":"4e215a7a78e75f39a9d6f03e5ea1ecf34ab6978e0d4cf0c5207da34047bd6e50"} Dec 01 18:36:03 crc kubenswrapper[4935]: I1201 18:36:03.478824 4935 generic.go:334] "Generic (PLEG): container finished" podID="e07502f2-97bd-468a-bd48-7d309cb9ee99" containerID="c7adbc0ba9f0e42e11db682c8d64d94b39992878ab504ec5b2c72d3844dfcefe" exitCode=0 Dec 01 18:36:03 crc kubenswrapper[4935]: I1201 18:36:03.478874 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vnc" 
event={"ID":"e07502f2-97bd-468a-bd48-7d309cb9ee99","Type":"ContainerDied","Data":"c7adbc0ba9f0e42e11db682c8d64d94b39992878ab504ec5b2c72d3844dfcefe"} Dec 01 18:36:03 crc kubenswrapper[4935]: I1201 18:36:03.490683 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mx79s" podStartSLOduration=1.737194257 podStartE2EDuration="5.4906676s" podCreationTimestamp="2025-12-01 18:35:58 +0000 UTC" firstStartedPulling="2025-12-01 18:35:59.419337605 +0000 UTC m=+373.440966884" lastFinishedPulling="2025-12-01 18:36:03.172810968 +0000 UTC m=+377.194440227" observedRunningTime="2025-12-01 18:36:03.489455502 +0000 UTC m=+377.511084761" watchObservedRunningTime="2025-12-01 18:36:03.4906676 +0000 UTC m=+377.512296859" Dec 01 18:36:04 crc kubenswrapper[4935]: I1201 18:36:04.486294 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5vnc" event={"ID":"e07502f2-97bd-468a-bd48-7d309cb9ee99","Type":"ContainerStarted","Data":"b93ffb792ebfe0cf8491403c7a621d4233db648374793189817eaeaa94101a98"} Dec 01 18:36:04 crc kubenswrapper[4935]: I1201 18:36:04.510180 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l5vnc" podStartSLOduration=1.903774695 podStartE2EDuration="4.510139461s" podCreationTimestamp="2025-12-01 18:36:00 +0000 UTC" firstStartedPulling="2025-12-01 18:36:01.445174492 +0000 UTC m=+375.466803751" lastFinishedPulling="2025-12-01 18:36:04.051539258 +0000 UTC m=+378.073168517" observedRunningTime="2025-12-01 18:36:04.505019369 +0000 UTC m=+378.526648628" watchObservedRunningTime="2025-12-01 18:36:04.510139461 +0000 UTC m=+378.531768720" Dec 01 18:36:04 crc kubenswrapper[4935]: I1201 18:36:04.948165 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7"] Dec 01 18:36:04 crc kubenswrapper[4935]: I1201 18:36:04.948479 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" podUID="67a3e4a8-d918-48d6-adc0-edf997699f14" containerName="route-controller-manager" containerID="cri-o://a9212783fedbba4d5fce7ab52adaa4fd072fb6f3c11ed92ad3ed535a012ae97b" gracePeriod=30 Dec 01 18:36:05 crc kubenswrapper[4935]: I1201 18:36:05.493455 4935 generic.go:334] "Generic (PLEG): container finished" podID="67a3e4a8-d918-48d6-adc0-edf997699f14" containerID="a9212783fedbba4d5fce7ab52adaa4fd072fb6f3c11ed92ad3ed535a012ae97b" exitCode=0 Dec 01 18:36:05 crc kubenswrapper[4935]: I1201 18:36:05.493567 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" event={"ID":"67a3e4a8-d918-48d6-adc0-edf997699f14","Type":"ContainerDied","Data":"a9212783fedbba4d5fce7ab52adaa4fd072fb6f3c11ed92ad3ed535a012ae97b"} Dec 01 18:36:06 crc kubenswrapper[4935]: I1201 18:36:06.388366 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:36:06 crc kubenswrapper[4935]: I1201 18:36:06.388864 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:36:06 crc kubenswrapper[4935]: I1201 18:36:06.439422 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:36:06 crc kubenswrapper[4935]: I1201 
18:36:06.500936 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" event={"ID":"67a3e4a8-d918-48d6-adc0-edf997699f14","Type":"ContainerDied","Data":"dfa90365d8c7d43f61a9f00f473736183c620162810e23b3bf05e0cdc0e61b79"} Dec 01 18:36:06 crc kubenswrapper[4935]: I1201 18:36:06.502137 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfa90365d8c7d43f61a9f00f473736183c620162810e23b3bf05e0cdc0e61b79" Dec 01 18:36:06 crc kubenswrapper[4935]: I1201 18:36:06.565272 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.102325 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.145964 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w"] Dec 01 18:36:07 crc kubenswrapper[4935]: E1201 18:36:07.146298 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67a3e4a8-d918-48d6-adc0-edf997699f14" containerName="route-controller-manager" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.146316 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="67a3e4a8-d918-48d6-adc0-edf997699f14" containerName="route-controller-manager" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.147637 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="67a3e4a8-d918-48d6-adc0-edf997699f14" containerName="route-controller-manager" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.152267 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.158951 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w"] Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.220436 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-config\") pod \"67a3e4a8-d918-48d6-adc0-edf997699f14\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.220517 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-client-ca\") pod \"67a3e4a8-d918-48d6-adc0-edf997699f14\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.220600 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tjgv\" (UniqueName: \"kubernetes.io/projected/67a3e4a8-d918-48d6-adc0-edf997699f14-kube-api-access-5tjgv\") pod \"67a3e4a8-d918-48d6-adc0-edf997699f14\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.220645 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67a3e4a8-d918-48d6-adc0-edf997699f14-serving-cert\") pod \"67a3e4a8-d918-48d6-adc0-edf997699f14\" (UID: \"67a3e4a8-d918-48d6-adc0-edf997699f14\") " Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.222267 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-config" (OuterVolumeSpecName: "config") pod "67a3e4a8-d918-48d6-adc0-edf997699f14" (UID: "67a3e4a8-d918-48d6-adc0-edf997699f14"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.223163 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-client-ca" (OuterVolumeSpecName: "client-ca") pod "67a3e4a8-d918-48d6-adc0-edf997699f14" (UID: "67a3e4a8-d918-48d6-adc0-edf997699f14"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.227780 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67a3e4a8-d918-48d6-adc0-edf997699f14-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "67a3e4a8-d918-48d6-adc0-edf997699f14" (UID: "67a3e4a8-d918-48d6-adc0-edf997699f14"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.244267 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67a3e4a8-d918-48d6-adc0-edf997699f14-kube-api-access-5tjgv" (OuterVolumeSpecName: "kube-api-access-5tjgv") pod "67a3e4a8-d918-48d6-adc0-edf997699f14" (UID: "67a3e4a8-d918-48d6-adc0-edf997699f14"). InnerVolumeSpecName "kube-api-access-5tjgv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.326122 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7rzx\" (UniqueName: \"kubernetes.io/projected/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-kube-api-access-v7rzx\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.326214 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-client-ca\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.326377 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-config\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.326432 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-serving-cert\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.326712 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.326728 4935 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/67a3e4a8-d918-48d6-adc0-edf997699f14-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.326744 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tjgv\" (UniqueName: \"kubernetes.io/projected/67a3e4a8-d918-48d6-adc0-edf997699f14-kube-api-access-5tjgv\") on node \"crc\" DevicePath \"\"" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.326757 4935 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67a3e4a8-d918-48d6-adc0-edf997699f14-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.428080 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7rzx\" (UniqueName: \"kubernetes.io/projected/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-kube-api-access-v7rzx\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.428575 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-client-ca\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.428653 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-config\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.428691 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-serving-cert\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.430710 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-client-ca\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.431039 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-config\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.433252 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-serving-cert\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.447742 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7rzx\" (UniqueName: \"kubernetes.io/projected/8d304ec4-0f1e-46a4-91d4-f8667b21bac6-kube-api-access-v7rzx\") pod \"route-controller-manager-74b5f7b547-2rb7w\" (UID: \"8d304ec4-0f1e-46a4-91d4-f8667b21bac6\") " pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.482357 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.506654 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd8nr" event={"ID":"efec9635-2457-41e3-8477-b6f6081dc30f","Type":"ContainerStarted","Data":"3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080"} Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.506768 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.533864 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jd8nr" podStartSLOduration=3.693915733 podStartE2EDuration="10.533842175s" podCreationTimestamp="2025-12-01 18:35:57 +0000 UTC" firstStartedPulling="2025-12-01 18:35:59.415916177 +0000 UTC m=+373.437545436" lastFinishedPulling="2025-12-01 18:36:06.255842619 +0000 UTC m=+380.277471878" observedRunningTime="2025-12-01 18:36:07.531891964 +0000 UTC m=+381.553521223" watchObservedRunningTime="2025-12-01 18:36:07.533842175 +0000 UTC m=+381.555471474" Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.548939 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7"] Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.554815 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7"] Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.889822 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w"] Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.976260 4935 patch_prober.go:28] interesting pod/route-controller-manager-559846b6c5-7mjx7 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.57:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 01 18:36:07 crc kubenswrapper[4935]: I1201 18:36:07.976329 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-559846b6c5-7mjx7" podUID="67a3e4a8-d918-48d6-adc0-edf997699f14" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.57:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 01 18:36:08 crc kubenswrapper[4935]: I1201 18:36:08.194825 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:36:08 crc kubenswrapper[4935]: I1201 18:36:08.195609 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:36:08 crc kubenswrapper[4935]: I1201 18:36:08.514357 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67a3e4a8-d918-48d6-adc0-edf997699f14" path="/var/lib/kubelet/pods/67a3e4a8-d918-48d6-adc0-edf997699f14/volumes" Dec 01 18:36:08 crc kubenswrapper[4935]: I1201 18:36:08.515106 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" event={"ID":"8d304ec4-0f1e-46a4-91d4-f8667b21bac6","Type":"ContainerStarted","Data":"8bc8d0a44177318cd1425254fe24d27c35bad20cd9156ff593eddd1359f640ef"} Dec 01 18:36:08 crc kubenswrapper[4935]: I1201 18:36:08.515907 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" 
event={"ID":"8d304ec4-0f1e-46a4-91d4-f8667b21bac6","Type":"ContainerStarted","Data":"8edfdf4a816096a67b5796a912b5bf32a0ac84c52e412a610be47281ad7384dc"} Dec 01 18:36:08 crc kubenswrapper[4935]: I1201 18:36:08.818573 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:36:08 crc kubenswrapper[4935]: I1201 18:36:08.818620 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:36:08 crc kubenswrapper[4935]: I1201 18:36:08.867481 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:36:08 crc kubenswrapper[4935]: I1201 18:36:08.885207 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" podStartSLOduration=4.885190131 podStartE2EDuration="4.885190131s" podCreationTimestamp="2025-12-01 18:36:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:36:08.530075321 +0000 UTC m=+382.551704580" watchObservedRunningTime="2025-12-01 18:36:08.885190131 +0000 UTC m=+382.906819390" Dec 01 18:36:09 crc kubenswrapper[4935]: I1201 18:36:09.247894 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-jd8nr" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" containerName="registry-server" probeResult="failure" output=< Dec 01 18:36:09 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 18:36:09 crc kubenswrapper[4935]: > Dec 01 18:36:09 crc kubenswrapper[4935]: I1201 18:36:09.518908 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:09 crc kubenswrapper[4935]: I1201 18:36:09.526340 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-74b5f7b547-2rb7w" Dec 01 18:36:09 crc kubenswrapper[4935]: I1201 18:36:09.562993 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mx79s" Dec 01 18:36:10 crc kubenswrapper[4935]: I1201 18:36:10.640082 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:10 crc kubenswrapper[4935]: I1201 18:36:10.640179 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:10 crc kubenswrapper[4935]: I1201 18:36:10.693300 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:11 crc kubenswrapper[4935]: I1201 18:36:11.589056 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l5vnc" Dec 01 18:36:18 crc kubenswrapper[4935]: I1201 18:36:18.256672 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:36:18 crc kubenswrapper[4935]: I1201 18:36:18.317952 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 18:36:24 crc kubenswrapper[4935]: 
I1201 18:36:24.346237 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:36:24 crc kubenswrapper[4935]: I1201 18:36:24.346902 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:36:24 crc kubenswrapper[4935]: I1201 18:36:24.346975 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:36:24 crc kubenswrapper[4935]: I1201 18:36:24.347788 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ba70f74e54e1786deaed12104c295d7b917d7f0c9ecd296020b6c7c70c481193"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 18:36:24 crc kubenswrapper[4935]: I1201 18:36:24.347864 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://ba70f74e54e1786deaed12104c295d7b917d7f0c9ecd296020b6c7c70c481193" gracePeriod=600 Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.630675 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="ba70f74e54e1786deaed12104c295d7b917d7f0c9ecd296020b6c7c70c481193" exitCode=0 Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.631055 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"ba70f74e54e1786deaed12104c295d7b917d7f0c9ecd296020b6c7c70c481193"} Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.631104 4935 scope.go:117] "RemoveContainer" containerID="c24b72535efc70a772114a48bac500ea2c30dfd468470fbc8422487a71aedd05" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.636074 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222"] Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.637404 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.651236 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"openshift-service-ca.crt" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.651615 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"cluster-monitoring-operator-tls" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.654485 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"telemetry-config" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.655038 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kube-root-ca.crt" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.655265 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"cluster-monitoring-operator-dockercfg-wwt9l" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.665098 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222"] Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.797234 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnzw6\" (UniqueName: \"kubernetes.io/projected/adfab833-b721-49a9-a8c2-2603b4d010a1-kube-api-access-bnzw6\") pod \"cluster-monitoring-operator-6d5b84845-v8222\" (UID: \"adfab833-b721-49a9-a8c2-2603b4d010a1\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.797473 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/adfab833-b721-49a9-a8c2-2603b4d010a1-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-v8222\" (UID: \"adfab833-b721-49a9-a8c2-2603b4d010a1\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.797813 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/adfab833-b721-49a9-a8c2-2603b4d010a1-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-v8222\" (UID: \"adfab833-b721-49a9-a8c2-2603b4d010a1\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.900045 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnzw6\" (UniqueName: \"kubernetes.io/projected/adfab833-b721-49a9-a8c2-2603b4d010a1-kube-api-access-bnzw6\") pod \"cluster-monitoring-operator-6d5b84845-v8222\" (UID: \"adfab833-b721-49a9-a8c2-2603b4d010a1\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.900164 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/adfab833-b721-49a9-a8c2-2603b4d010a1-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-v8222\" (UID: \"adfab833-b721-49a9-a8c2-2603b4d010a1\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 
18:36:25.900213 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/adfab833-b721-49a9-a8c2-2603b4d010a1-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-v8222\" (UID: \"adfab833-b721-49a9-a8c2-2603b4d010a1\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.903302 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/adfab833-b721-49a9-a8c2-2603b4d010a1-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-v8222\" (UID: \"adfab833-b721-49a9-a8c2-2603b4d010a1\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.911545 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/adfab833-b721-49a9-a8c2-2603b4d010a1-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-v8222\" (UID: \"adfab833-b721-49a9-a8c2-2603b4d010a1\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.931787 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnzw6\" (UniqueName: \"kubernetes.io/projected/adfab833-b721-49a9-a8c2-2603b4d010a1-kube-api-access-bnzw6\") pod \"cluster-monitoring-operator-6d5b84845-v8222\" (UID: \"adfab833-b721-49a9-a8c2-2603b4d010a1\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:25 crc kubenswrapper[4935]: I1201 18:36:25.980358 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" Dec 01 18:36:26 crc kubenswrapper[4935]: I1201 18:36:26.441705 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222"] Dec 01 18:36:26 crc kubenswrapper[4935]: I1201 18:36:26.640476 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"b347e6f589fbccdde8049b52c2b8f25c113125fe0e4295d71410044e8cbbc0ae"} Dec 01 18:36:26 crc kubenswrapper[4935]: I1201 18:36:26.641654 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" event={"ID":"adfab833-b721-49a9-a8c2-2603b4d010a1","Type":"ContainerStarted","Data":"664d0212acdfdbc3ae1ffa0518cbe8e69a309eee1d297fedd9c90dc634038520"} Dec 01 18:36:30 crc kubenswrapper[4935]: I1201 18:36:30.273294 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc"] Dec 01 18:36:30 crc kubenswrapper[4935]: I1201 18:36:30.274255 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" Dec 01 18:36:30 crc kubenswrapper[4935]: I1201 18:36:30.281722 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-admission-webhook-dockercfg-87v4s" Dec 01 18:36:30 crc kubenswrapper[4935]: I1201 18:36:30.285770 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc"] Dec 01 18:36:30 crc kubenswrapper[4935]: I1201 18:36:30.286718 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-admission-webhook-tls" Dec 01 18:36:30 crc kubenswrapper[4935]: I1201 18:36:30.462467 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/021f71bb-dfa1-45c6-9470-1b41ad435d94-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-4xmdc\" (UID: \"021f71bb-dfa1-45c6-9470-1b41ad435d94\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" Dec 01 18:36:30 crc kubenswrapper[4935]: I1201 18:36:30.563933 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/021f71bb-dfa1-45c6-9470-1b41ad435d94-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-4xmdc\" (UID: \"021f71bb-dfa1-45c6-9470-1b41ad435d94\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" Dec 01 18:36:30 crc kubenswrapper[4935]: E1201 18:36:30.564066 4935 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 01 18:36:30 crc kubenswrapper[4935]: E1201 18:36:30.564133 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/021f71bb-dfa1-45c6-9470-1b41ad435d94-tls-certificates podName:021f71bb-dfa1-45c6-9470-1b41ad435d94 nodeName:}" failed. No retries permitted until 2025-12-01 18:36:31.064115857 +0000 UTC m=+405.085745116 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/021f71bb-dfa1-45c6-9470-1b41ad435d94-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-4xmdc" (UID: "021f71bb-dfa1-45c6-9470-1b41ad435d94") : secret "prometheus-operator-admission-webhook-tls" not found Dec 01 18:36:30 crc kubenswrapper[4935]: I1201 18:36:30.677207 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" event={"ID":"adfab833-b721-49a9-a8c2-2603b4d010a1","Type":"ContainerStarted","Data":"74b9ee30c2b25afc8affc20be3394bc445f9bf4aaeeef202b069a4a6cdd11097"} Dec 01 18:36:31 crc kubenswrapper[4935]: I1201 18:36:31.069443 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/021f71bb-dfa1-45c6-9470-1b41ad435d94-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-4xmdc\" (UID: \"021f71bb-dfa1-45c6-9470-1b41ad435d94\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" Dec 01 18:36:31 crc kubenswrapper[4935]: I1201 18:36:31.080458 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/021f71bb-dfa1-45c6-9470-1b41ad435d94-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-4xmdc\" (UID: \"021f71bb-dfa1-45c6-9470-1b41ad435d94\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" Dec 01 18:36:31 crc kubenswrapper[4935]: I1201 18:36:31.198904 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" Dec 01 18:36:31 crc kubenswrapper[4935]: I1201 18:36:31.748775 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-v8222" podStartSLOduration=3.610692081 podStartE2EDuration="6.748741021s" podCreationTimestamp="2025-12-01 18:36:25 +0000 UTC" firstStartedPulling="2025-12-01 18:36:26.457268869 +0000 UTC m=+400.478898168" lastFinishedPulling="2025-12-01 18:36:29.595317839 +0000 UTC m=+403.616947108" observedRunningTime="2025-12-01 18:36:30.700466539 +0000 UTC m=+404.722095838" watchObservedRunningTime="2025-12-01 18:36:31.748741021 +0000 UTC m=+405.770370320" Dec 01 18:36:31 crc kubenswrapper[4935]: I1201 18:36:31.756105 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc"] Dec 01 18:36:32 crc kubenswrapper[4935]: I1201 18:36:32.693774 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" event={"ID":"021f71bb-dfa1-45c6-9470-1b41ad435d94","Type":"ContainerStarted","Data":"4224db8b4775c2da8651bba6936036b03a6c22fda7f4e9f6ba4986cb3a8d9c04"} Dec 01 18:36:34 crc kubenswrapper[4935]: I1201 18:36:34.710845 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" event={"ID":"021f71bb-dfa1-45c6-9470-1b41ad435d94","Type":"ContainerStarted","Data":"c59944a3fc04f8c759868ffe8ba463f7d1350308ca46e429d6825f978f579190"} Dec 01 18:36:34 crc kubenswrapper[4935]: I1201 18:36:34.711769 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" Dec 01 
18:36:34 crc kubenswrapper[4935]: I1201 18:36:34.719001 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" Dec 01 18:36:34 crc kubenswrapper[4935]: I1201 18:36:34.737623 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-4xmdc" podStartSLOduration=2.646589686 podStartE2EDuration="4.737549312s" podCreationTimestamp="2025-12-01 18:36:30 +0000 UTC" firstStartedPulling="2025-12-01 18:36:31.764053925 +0000 UTC m=+405.785683194" lastFinishedPulling="2025-12-01 18:36:33.855013531 +0000 UTC m=+407.876642820" observedRunningTime="2025-12-01 18:36:34.730884861 +0000 UTC m=+408.752514170" watchObservedRunningTime="2025-12-01 18:36:34.737549312 +0000 UTC m=+408.759178591" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.370457 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-qdmvk"] Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.372110 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.374828 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-kube-rbac-proxy-config" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.375043 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-tls" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.375299 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-dockercfg-hmlkh" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.376723 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"metrics-client-ca" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.391017 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-qdmvk"] Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.542758 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f6245204-c4b8-44f2-90c2-64832beb3c17-metrics-client-ca\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.542885 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tthl5\" (UniqueName: \"kubernetes.io/projected/f6245204-c4b8-44f2-90c2-64832beb3c17-kube-api-access-tthl5\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.542921 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/f6245204-c4b8-44f2-90c2-64832beb3c17-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: 
I1201 18:36:35.543003 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/f6245204-c4b8-44f2-90c2-64832beb3c17-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.644754 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tthl5\" (UniqueName: \"kubernetes.io/projected/f6245204-c4b8-44f2-90c2-64832beb3c17-kube-api-access-tthl5\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.645206 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/f6245204-c4b8-44f2-90c2-64832beb3c17-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: E1201 18:36:35.645351 4935 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-tls: secret "prometheus-operator-tls" not found Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.645538 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/f6245204-c4b8-44f2-90c2-64832beb3c17-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: E1201 18:36:35.645686 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6245204-c4b8-44f2-90c2-64832beb3c17-prometheus-operator-tls podName:f6245204-c4b8-44f2-90c2-64832beb3c17 nodeName:}" failed. No retries permitted until 2025-12-01 18:36:36.14562606 +0000 UTC m=+410.167255349 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "prometheus-operator-tls" (UniqueName: "kubernetes.io/secret/f6245204-c4b8-44f2-90c2-64832beb3c17-prometheus-operator-tls") pod "prometheus-operator-db54df47d-qdmvk" (UID: "f6245204-c4b8-44f2-90c2-64832beb3c17") : secret "prometheus-operator-tls" not found Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.645939 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f6245204-c4b8-44f2-90c2-64832beb3c17-metrics-client-ca\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.647663 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f6245204-c4b8-44f2-90c2-64832beb3c17-metrics-client-ca\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.651667 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/f6245204-c4b8-44f2-90c2-64832beb3c17-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:35 crc kubenswrapper[4935]: I1201 18:36:35.664376 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tthl5\" (UniqueName: \"kubernetes.io/projected/f6245204-c4b8-44f2-90c2-64832beb3c17-kube-api-access-tthl5\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:36 crc kubenswrapper[4935]: I1201 18:36:36.154262 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/f6245204-c4b8-44f2-90c2-64832beb3c17-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:36 crc kubenswrapper[4935]: I1201 18:36:36.157726 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/f6245204-c4b8-44f2-90c2-64832beb3c17-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-qdmvk\" (UID: \"f6245204-c4b8-44f2-90c2-64832beb3c17\") " pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:36 crc kubenswrapper[4935]: I1201 18:36:36.313693 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" Dec 01 18:36:36 crc kubenswrapper[4935]: I1201 18:36:36.864069 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-qdmvk"] Dec 01 18:36:36 crc kubenswrapper[4935]: W1201 18:36:36.870239 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6245204_c4b8_44f2_90c2_64832beb3c17.slice/crio-70455a462a537968757b0f813bd7ed84b0a6f1352a8e3ecc9b7e0139fd8d24c9 WatchSource:0}: Error finding container 70455a462a537968757b0f813bd7ed84b0a6f1352a8e3ecc9b7e0139fd8d24c9: Status 404 returned error can't find the container with id 70455a462a537968757b0f813bd7ed84b0a6f1352a8e3ecc9b7e0139fd8d24c9 Dec 01 18:36:37 crc kubenswrapper[4935]: I1201 18:36:37.735287 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" event={"ID":"f6245204-c4b8-44f2-90c2-64832beb3c17","Type":"ContainerStarted","Data":"70455a462a537968757b0f813bd7ed84b0a6f1352a8e3ecc9b7e0139fd8d24c9"} Dec 01 18:36:40 crc kubenswrapper[4935]: I1201 18:36:40.758376 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" event={"ID":"f6245204-c4b8-44f2-90c2-64832beb3c17","Type":"ContainerStarted","Data":"bd372fe032729f94a59c8c64f666f119e34564444c2e16f3002ad5a88f306794"} Dec 01 18:36:41 crc kubenswrapper[4935]: I1201 18:36:41.767102 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" event={"ID":"f6245204-c4b8-44f2-90c2-64832beb3c17","Type":"ContainerStarted","Data":"9fd6e0d77756d0ef77f654df79df846c0058b02ab1974ede1f79f46e339b099a"} Dec 01 18:36:41 crc kubenswrapper[4935]: I1201 18:36:41.795357 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-operator-db54df47d-qdmvk" podStartSLOduration=3.143223915 podStartE2EDuration="6.795329843s" podCreationTimestamp="2025-12-01 18:36:35 +0000 UTC" firstStartedPulling="2025-12-01 18:36:36.872961774 +0000 UTC m=+410.894591073" lastFinishedPulling="2025-12-01 18:36:40.525067732 +0000 UTC m=+414.546697001" observedRunningTime="2025-12-01 18:36:41.788866069 +0000 UTC m=+415.810495358" watchObservedRunningTime="2025-12-01 18:36:41.795329843 +0000 UTC m=+415.816959132" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.752902 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf"] Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.754475 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.756070 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-kube-rbac-proxy-config" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.756109 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-tls" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.756126 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-dockercfg-ljqbm" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.756463 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h"] Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.757522 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.759786 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-dockercfg-mtqhv" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.759935 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-kube-rbac-proxy-config" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.761170 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-tls" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.767479 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kube-state-metrics-custom-resource-state-configmap" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.775162 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf"] Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.785911 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h"] Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.809197 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/node-exporter-ph4fh"] Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.810420 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.812433 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-tls" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.813385 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-dockercfg-wzjbh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.814364 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-kube-rbac-proxy-config" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872056 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/1b00966a-3978-4c45-a754-2da62f290a9c-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872109 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/1b00966a-3978-4c45-a754-2da62f290a9c-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872142 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/f37df1d7-89a6-47b9-be65-bdb7fe94318a-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872211 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmlbp\" (UniqueName: \"kubernetes.io/projected/f37df1d7-89a6-47b9-be65-bdb7fe94318a-kube-api-access-xmlbp\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872337 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/1b00966a-3978-4c45-a754-2da62f290a9c-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872383 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f37df1d7-89a6-47b9-be65-bdb7fe94318a-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872448 4935 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/f37df1d7-89a6-47b9-be65-bdb7fe94318a-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872478 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/1b00966a-3978-4c45-a754-2da62f290a9c-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872524 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8knrv\" (UniqueName: \"kubernetes.io/projected/1b00966a-3978-4c45-a754-2da62f290a9c-kube-api-access-8knrv\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.872554 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/1b00966a-3978-4c45-a754-2da62f290a9c-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974408 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-wtmp\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974493 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/1b00966a-3978-4c45-a754-2da62f290a9c-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974534 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/1b00966a-3978-4c45-a754-2da62f290a9c-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974560 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"root\" (UniqueName: \"kubernetes.io/host-path/69505b09-6fc9-48ef-8413-d07ce51e9d81-root\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974593 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/1b00966a-3978-4c45-a754-2da62f290a9c-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974654 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974771 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/f37df1d7-89a6-47b9-be65-bdb7fe94318a-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974889 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmlbp\" (UniqueName: \"kubernetes.io/projected/f37df1d7-89a6-47b9-be65-bdb7fe94318a-kube-api-access-xmlbp\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974923 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4thj\" (UniqueName: \"kubernetes.io/projected/69505b09-6fc9-48ef-8413-d07ce51e9d81-kube-api-access-v4thj\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974945 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-textfile\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.974984 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/69505b09-6fc9-48ef-8413-d07ce51e9d81-sys\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975082 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/69505b09-6fc9-48ef-8413-d07ce51e9d81-metrics-client-ca\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975160 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: 
\"kubernetes.io/secret/1b00966a-3978-4c45-a754-2da62f290a9c-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975184 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f37df1d7-89a6-47b9-be65-bdb7fe94318a-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975242 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/f37df1d7-89a6-47b9-be65-bdb7fe94318a-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975243 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/1b00966a-3978-4c45-a754-2da62f290a9c-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975301 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/1b00966a-3978-4c45-a754-2da62f290a9c-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975326 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8knrv\" (UniqueName: \"kubernetes.io/projected/1b00966a-3978-4c45-a754-2da62f290a9c-kube-api-access-8knrv\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975350 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-tls\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975703 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/1b00966a-3978-4c45-a754-2da62f290a9c-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.975877 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/1b00966a-3978-4c45-a754-2da62f290a9c-kube-state-metrics-custom-resource-state-configmap\") 
pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.976064 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f37df1d7-89a6-47b9-be65-bdb7fe94318a-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.982923 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/1b00966a-3978-4c45-a754-2da62f290a9c-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.982968 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/f37df1d7-89a6-47b9-be65-bdb7fe94318a-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.982940 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/1b00966a-3978-4c45-a754-2da62f290a9c-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.986660 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/f37df1d7-89a6-47b9-be65-bdb7fe94318a-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.992385 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmlbp\" (UniqueName: \"kubernetes.io/projected/f37df1d7-89a6-47b9-be65-bdb7fe94318a-kube-api-access-xmlbp\") pod \"openshift-state-metrics-566fddb674-9s5xf\" (UID: \"f37df1d7-89a6-47b9-be65-bdb7fe94318a\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:43 crc kubenswrapper[4935]: I1201 18:36:43.992996 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8knrv\" (UniqueName: \"kubernetes.io/projected/1b00966a-3978-4c45-a754-2da62f290a9c-kube-api-access-8knrv\") pod \"kube-state-metrics-777cb5bd5d-4ln7h\" (UID: \"1b00966a-3978-4c45-a754-2da62f290a9c\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.069319 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.100812 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101242 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-tls\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101295 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-wtmp\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101342 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"root\" (UniqueName: \"kubernetes.io/host-path/69505b09-6fc9-48ef-8413-d07ce51e9d81-root\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101391 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101428 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4thj\" (UniqueName: \"kubernetes.io/projected/69505b09-6fc9-48ef-8413-d07ce51e9d81-kube-api-access-v4thj\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101454 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-textfile\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101481 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/69505b09-6fc9-48ef-8413-d07ce51e9d81-sys\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101490 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"root\" (UniqueName: \"kubernetes.io/host-path/69505b09-6fc9-48ef-8413-d07ce51e9d81-root\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101513 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/69505b09-6fc9-48ef-8413-d07ce51e9d81-metrics-client-ca\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 
18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.101791 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-wtmp\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.102349 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-textfile\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.102401 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/69505b09-6fc9-48ef-8413-d07ce51e9d81-sys\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.102416 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/69505b09-6fc9-48ef-8413-d07ce51e9d81-metrics-client-ca\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.107697 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-tls\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.108597 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/69505b09-6fc9-48ef-8413-d07ce51e9d81-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.133732 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4thj\" (UniqueName: \"kubernetes.io/projected/69505b09-6fc9-48ef-8413-d07ce51e9d81-kube-api-access-v4thj\") pod \"node-exporter-ph4fh\" (UID: \"69505b09-6fc9-48ef-8413-d07ce51e9d81\") " pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.392232 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h"] Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.422421 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/node-exporter-ph4fh" Dec 01 18:36:44 crc kubenswrapper[4935]: W1201 18:36:44.453988 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69505b09_6fc9_48ef_8413_d07ce51e9d81.slice/crio-8b8cb0f4c52eeeb65713111c3180d6c431a3b3f9b5cf748a33e3f3a79a1b42bf WatchSource:0}: Error finding container 8b8cb0f4c52eeeb65713111c3180d6c431a3b3f9b5cf748a33e3f3a79a1b42bf: Status 404 returned error can't find the container with id 8b8cb0f4c52eeeb65713111c3180d6c431a3b3f9b5cf748a33e3f3a79a1b42bf Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.529470 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf"] Dec 01 18:36:44 crc kubenswrapper[4935]: W1201 18:36:44.537441 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf37df1d7_89a6_47b9_be65_bdb7fe94318a.slice/crio-b782a36cd7b424dcb54356d3fc8c8d44c2a92412cf8ef0ddaed11463008c7580 WatchSource:0}: Error finding container b782a36cd7b424dcb54356d3fc8c8d44c2a92412cf8ef0ddaed11463008c7580: Status 404 returned error can't find the container with id b782a36cd7b424dcb54356d3fc8c8d44c2a92412cf8ef0ddaed11463008c7580 Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.783247 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" event={"ID":"f37df1d7-89a6-47b9-be65-bdb7fe94318a","Type":"ContainerStarted","Data":"a1b8449c990ef3724089de26e5268b59584b4c127bc78545d56dd2a4405062bf"} Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.783615 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" event={"ID":"f37df1d7-89a6-47b9-be65-bdb7fe94318a","Type":"ContainerStarted","Data":"b782a36cd7b424dcb54356d3fc8c8d44c2a92412cf8ef0ddaed11463008c7580"} Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.784410 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-ph4fh" event={"ID":"69505b09-6fc9-48ef-8413-d07ce51e9d81","Type":"ContainerStarted","Data":"8b8cb0f4c52eeeb65713111c3180d6c431a3b3f9b5cf748a33e3f3a79a1b42bf"} Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.785451 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" event={"ID":"1b00966a-3978-4c45-a754-2da62f290a9c","Type":"ContainerStarted","Data":"300bf3836bf5606773cad9063148a07d44ef9149559bb89b645fd9cbc39ddc09"} Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.846779 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.848497 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.851705 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.851975 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-generated" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.851988 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy-metric" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.852045 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-tls-assets-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.852230 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-tls" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.854592 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy-web" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.854766 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-dockercfg-nlwdz" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.854932 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-web-config" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.861535 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"alertmanager-trusted-ca-bundle" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.869714 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.914268 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.914348 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.914549 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-config-out\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.914656 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-web-config\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc 
kubenswrapper[4935]: I1201 18:36:44.914695 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-config-volume\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.914763 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.914897 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-tls-assets\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.914938 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ztjr\" (UniqueName: \"kubernetes.io/projected/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-kube-api-access-5ztjr\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.915007 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.915040 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.915088 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:44 crc kubenswrapper[4935]: I1201 18:36:44.915241 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016605 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016665 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016696 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016735 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-config-out\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016762 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-web-config\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016777 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-config-volume\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016809 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016835 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-tls-assets\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016852 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ztjr\" (UniqueName: \"kubernetes.io/projected/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-kube-api-access-5ztjr\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.016872 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.017331 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.017373 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.018640 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.021584 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.022257 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.024478 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.024524 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.025228 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-config-out\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.025802 4935 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-web-config\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.027005 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-config-volume\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.034225 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.038426 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.043303 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-tls-assets\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.044824 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ztjr\" (UniqueName: \"kubernetes.io/projected/ee68ee3a-b60e-46d1-ada4-9b6db954c33e-kube-api-access-5ztjr\") pod \"alertmanager-main-0\" (UID: \"ee68ee3a-b60e-46d1-ada4-9b6db954c33e\") " pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.161520 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/alertmanager-main-0" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.685107 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 01 18:36:45 crc kubenswrapper[4935]: W1201 18:36:45.696737 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee68ee3a_b60e_46d1_ada4_9b6db954c33e.slice/crio-4e472058c92277266e59223f26915066fc2b557a7bf7d2b20f5159861f89dea9 WatchSource:0}: Error finding container 4e472058c92277266e59223f26915066fc2b557a7bf7d2b20f5159861f89dea9: Status 404 returned error can't find the container with id 4e472058c92277266e59223f26915066fc2b557a7bf7d2b20f5159861f89dea9 Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.793028 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"ee68ee3a-b60e-46d1-ada4-9b6db954c33e","Type":"ContainerStarted","Data":"4e472058c92277266e59223f26915066fc2b557a7bf7d2b20f5159861f89dea9"} Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.795310 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" event={"ID":"f37df1d7-89a6-47b9-be65-bdb7fe94318a","Type":"ContainerStarted","Data":"3a71e069d8f4c89527e06ed5bc69b50fd2357914fb402b9f84e7d7d172223815"} Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.860363 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/thanos-querier-55668896bc-ppb2l"] Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.862080 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.865686 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-metrics" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.866061 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.866302 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-rules" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.866595 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-tls" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.867090 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-dockercfg-jnlfg" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.868199 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-grpc-tls-8f0jlvqntk480" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.868437 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-web" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.896558 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/thanos-querier-55668896bc-ppb2l"] Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.932567 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-tls\" (UniqueName: 
\"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-tls\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.932677 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.932714 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.932757 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-grpc-tls\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.932994 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.933176 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.933218 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km2f4\" (UniqueName: \"kubernetes.io/projected/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-kube-api-access-km2f4\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:45 crc kubenswrapper[4935]: I1201 18:36:45.933255 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-metrics-client-ca\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.034121 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.034200 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.034227 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km2f4\" (UniqueName: \"kubernetes.io/projected/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-kube-api-access-km2f4\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.034254 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-metrics-client-ca\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.034288 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-tls\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.034312 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.034337 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.034369 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-grpc-tls\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.035871 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-metrics-client-ca\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.044235 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-tls\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.047870 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.049044 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.049902 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-grpc-tls\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.050337 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.065438 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.067319 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km2f4\" (UniqueName: \"kubernetes.io/projected/bbd6b497-fe17-45e9-a5ef-2f4e1101389a-kube-api-access-km2f4\") pod \"thanos-querier-55668896bc-ppb2l\" (UID: \"bbd6b497-fe17-45e9-a5ef-2f4e1101389a\") " pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.186879 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:46 crc kubenswrapper[4935]: I1201 18:36:46.676965 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/thanos-querier-55668896bc-ppb2l"] Dec 01 18:36:47 crc kubenswrapper[4935]: W1201 18:36:47.045662 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbd6b497_fe17_45e9_a5ef_2f4e1101389a.slice/crio-1d8ce0e8d03f0aefe618b298e408103e505ea8aca119d8d72818b4a6ea484ca1 WatchSource:0}: Error finding container 1d8ce0e8d03f0aefe618b298e408103e505ea8aca119d8d72818b4a6ea484ca1: Status 404 returned error can't find the container with id 1d8ce0e8d03f0aefe618b298e408103e505ea8aca119d8d72818b4a6ea484ca1 Dec 01 18:36:47 crc kubenswrapper[4935]: I1201 18:36:47.810193 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" event={"ID":"bbd6b497-fe17-45e9-a5ef-2f4e1101389a","Type":"ContainerStarted","Data":"1d8ce0e8d03f0aefe618b298e408103e505ea8aca119d8d72818b4a6ea484ca1"} Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.791971 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6fccdf9d4b-hw5kb"] Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.795022 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.804782 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6fccdf9d4b-hw5kb"] Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.849860 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" event={"ID":"1b00966a-3978-4c45-a754-2da62f290a9c","Type":"ContainerStarted","Data":"411939ff4ac44c9e3880ce8282027edc918228c68035f888af3683c740c476db"} Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.849941 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" event={"ID":"1b00966a-3978-4c45-a754-2da62f290a9c","Type":"ContainerStarted","Data":"9697069a7b934a500d6ad2c8aa029b6154380183380bfea868904622ca22d202"} Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.852217 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" event={"ID":"f37df1d7-89a6-47b9-be65-bdb7fe94318a","Type":"ContainerStarted","Data":"0619277d7409e9b369ee7a63761b7c33104447676ce185f2be9419e26e58fd14"} Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.867442 4935 generic.go:334] "Generic (PLEG): container finished" podID="ee68ee3a-b60e-46d1-ada4-9b6db954c33e" containerID="8dd6f08db951191d65596ad1ecb6c4f212d26a678eba9639c1c6fea398fac00e" exitCode=0 Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.867542 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"ee68ee3a-b60e-46d1-ada4-9b6db954c33e","Type":"ContainerDied","Data":"8dd6f08db951191d65596ad1ecb6c4f212d26a678eba9639c1c6fea398fac00e"} Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.877967 4935 generic.go:334] "Generic (PLEG): container finished" podID="69505b09-6fc9-48ef-8413-d07ce51e9d81" containerID="27b83e40610c86b02a228b98ccca67bdaea72a4dace70ac4e41402627b4a765f" exitCode=0 Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.878035 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-ph4fh" event={"ID":"69505b09-6fc9-48ef-8413-d07ce51e9d81","Type":"ContainerDied","Data":"27b83e40610c86b02a228b98ccca67bdaea72a4dace70ac4e41402627b4a765f"} Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.882728 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-serving-cert\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.882776 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-trusted-ca-bundle\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.882803 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-config\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.882839 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zkh8\" (UniqueName: \"kubernetes.io/projected/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-kube-api-access-8zkh8\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.882863 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-service-ca\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.882921 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-oauth-config\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.882962 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-oauth-serving-cert\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.925245 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/openshift-state-metrics-566fddb674-9s5xf" podStartSLOduration=2.881568952 podStartE2EDuration="5.925227808s" podCreationTimestamp="2025-12-01 18:36:43 +0000 UTC" firstStartedPulling="2025-12-01 18:36:44.808370462 +0000 UTC m=+418.829999721" 
lastFinishedPulling="2025-12-01 18:36:47.852029318 +0000 UTC m=+421.873658577" observedRunningTime="2025-12-01 18:36:48.877505288 +0000 UTC m=+422.899134547" watchObservedRunningTime="2025-12-01 18:36:48.925227808 +0000 UTC m=+422.946857067" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.983989 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-oauth-serving-cert\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.984105 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-serving-cert\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.984132 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-trusted-ca-bundle\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.984175 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-config\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.984274 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zkh8\" (UniqueName: \"kubernetes.io/projected/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-kube-api-access-8zkh8\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.984327 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-service-ca\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.984467 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-oauth-config\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.985449 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-config\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.985576 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-trusted-ca-bundle\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.985630 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-service-ca\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.985940 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-oauth-serving-cert\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:48 crc kubenswrapper[4935]: I1201 18:36:48.991792 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-serving-cert\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.001201 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-oauth-config\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.004925 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zkh8\" (UniqueName: \"kubernetes.io/projected/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-kube-api-access-8zkh8\") pod \"console-6fccdf9d4b-hw5kb\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.136227 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/metrics-server-67dccdf457-pfhr5"] Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.136915 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.141227 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.142030 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kubelet-serving-ca-bundle" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.142099 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-e7lef7k5d2p5i" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.142165 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-tls" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.142250 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-dockercfg-97r7k" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.142333 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"metrics-server-audit-profiles" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.142405 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-client-certs" Dec 01 18:36:49 crc kubenswrapper[4935]: I1201 18:36:49.152718 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/metrics-server-67dccdf457-pfhr5"] Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.289015 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/d552028c-c467-4640-a164-66283cc6ba3b-metrics-server-audit-profiles\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.289459 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d552028c-c467-4640-a164-66283cc6ba3b-client-ca-bundle\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.289511 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/d552028c-c467-4640-a164-66283cc6ba3b-secret-metrics-server-tls\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.289548 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/d552028c-c467-4640-a164-66283cc6ba3b-secret-metrics-client-certs\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.289604 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d552028c-c467-4640-a164-66283cc6ba3b-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " 
pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.289642 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/d552028c-c467-4640-a164-66283cc6ba3b-audit-log\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.289709 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zvzp\" (UniqueName: \"kubernetes.io/projected/d552028c-c467-4640-a164-66283cc6ba3b-kube-api-access-5zvzp\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.390951 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/d552028c-c467-4640-a164-66283cc6ba3b-secret-metrics-server-tls\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.391023 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/d552028c-c467-4640-a164-66283cc6ba3b-secret-metrics-client-certs\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.391085 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d552028c-c467-4640-a164-66283cc6ba3b-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.391132 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/d552028c-c467-4640-a164-66283cc6ba3b-audit-log\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.391194 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zvzp\" (UniqueName: \"kubernetes.io/projected/d552028c-c467-4640-a164-66283cc6ba3b-kube-api-access-5zvzp\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.391225 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/d552028c-c467-4640-a164-66283cc6ba3b-metrics-server-audit-profiles\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.391256 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d552028c-c467-4640-a164-66283cc6ba3b-client-ca-bundle\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.392645 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/d552028c-c467-4640-a164-66283cc6ba3b-audit-log\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.392691 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/d552028c-c467-4640-a164-66283cc6ba3b-metrics-server-audit-profiles\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.392693 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d552028c-c467-4640-a164-66283cc6ba3b-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.397892 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/d552028c-c467-4640-a164-66283cc6ba3b-secret-metrics-server-tls\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.399545 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d552028c-c467-4640-a164-66283cc6ba3b-client-ca-bundle\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.403869 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/d552028c-c467-4640-a164-66283cc6ba3b-secret-metrics-client-certs\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.410464 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zvzp\" (UniqueName: \"kubernetes.io/projected/d552028c-c467-4640-a164-66283cc6ba3b-kube-api-access-5zvzp\") pod \"metrics-server-67dccdf457-pfhr5\" (UID: \"d552028c-c467-4640-a164-66283cc6ba3b\") " pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.492480 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.522012 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/monitoring-plugin-8669468879-5npfp"] Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.522855 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.525624 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"default-dockercfg-6tstp" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.526000 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"monitoring-plugin-cert" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.555822 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/monitoring-plugin-8669468879-5npfp"] Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.555879 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6fccdf9d4b-hw5kb"] Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.594699 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/fb8a9aa4-dfb3-44a6-ac60-1caa46add655-monitoring-plugin-cert\") pod \"monitoring-plugin-8669468879-5npfp\" (UID: \"fb8a9aa4-dfb3-44a6-ac60-1caa46add655\") " pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.695743 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/fb8a9aa4-dfb3-44a6-ac60-1caa46add655-monitoring-plugin-cert\") pod \"monitoring-plugin-8669468879-5npfp\" (UID: \"fb8a9aa4-dfb3-44a6-ac60-1caa46add655\") " pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.700364 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/fb8a9aa4-dfb3-44a6-ac60-1caa46add655-monitoring-plugin-cert\") pod \"monitoring-plugin-8669468879-5npfp\" (UID: \"fb8a9aa4-dfb3-44a6-ac60-1caa46add655\") " pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.887780 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.919930 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-ph4fh" event={"ID":"69505b09-6fc9-48ef-8413-d07ce51e9d81","Type":"ContainerStarted","Data":"6e11b3eba5636b8f02cccd65a0d32e71ee0555c1d6f266632ac8de3cabc43512"} Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.919963 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-ph4fh" event={"ID":"69505b09-6fc9-48ef-8413-d07ce51e9d81","Type":"ContainerStarted","Data":"1707dea1fce5996f775efed192c18852c27f1e61a75ef3c7da164df5814510ce"} Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.922178 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" event={"ID":"1b00966a-3978-4c45-a754-2da62f290a9c","Type":"ContainerStarted","Data":"19b83cb44624e311bee017cfaf16708e6beb35b682008fbf2739af329e80472c"} Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.946452 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/node-exporter-ph4fh" podStartSLOduration=3.553171991 podStartE2EDuration="6.946434893s" podCreationTimestamp="2025-12-01 18:36:43 +0000 UTC" firstStartedPulling="2025-12-01 18:36:44.45954881 +0000 UTC m=+418.481178069" lastFinishedPulling="2025-12-01 18:36:47.852811712 +0000 UTC m=+421.874440971" observedRunningTime="2025-12-01 18:36:49.939923448 +0000 UTC m=+423.961552907" watchObservedRunningTime="2025-12-01 18:36:49.946434893 +0000 UTC m=+423.968064152" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:49.960695 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-4ln7h" podStartSLOduration=3.5065281649999998 podStartE2EDuration="6.960677263s" podCreationTimestamp="2025-12-01 18:36:43 +0000 UTC" firstStartedPulling="2025-12-01 18:36:44.397916481 +0000 UTC m=+418.419545750" lastFinishedPulling="2025-12-01 18:36:47.852065549 +0000 UTC m=+421.873694848" observedRunningTime="2025-12-01 18:36:49.959025642 +0000 UTC m=+423.980654901" watchObservedRunningTime="2025-12-01 18:36:49.960677263 +0000 UTC m=+423.982306522" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.143331 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.145867 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.153122 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"serving-certs-ca-bundle" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.153521 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-tls-assets-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.154783 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-thanos-prometheus-http-client-file" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.154999 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-web-config" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.156829 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"prometheus-k8s-rulefiles-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.156922 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-kube-rbac-proxy-web" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.157066 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-grpc-tls-cbo4dp5gveoi0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.157181 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-dockercfg-4z8vl" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.157265 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-rbac-proxy" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.157346 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.157357 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-thanos-sidecar-tls" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.157418 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-tls" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.158424 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"prometheus-trusted-ca-bundle" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.175515 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206042 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206089 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnrz9\" (UniqueName: \"kubernetes.io/projected/178e5392-5bb7-473d-a383-dcb9f9079b9c-kube-api-access-rnrz9\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206120 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206140 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/178e5392-5bb7-473d-a383-dcb9f9079b9c-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206177 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206201 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/178e5392-5bb7-473d-a383-dcb9f9079b9c-config-out\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206228 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206258 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206276 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-config\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206294 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206315 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: 
\"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206334 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206357 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-web-config\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206380 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/178e5392-5bb7-473d-a383-dcb9f9079b9c-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206406 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206425 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206443 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.206467 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.311881 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.311944 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.311970 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.311998 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312033 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312061 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnrz9\" (UniqueName: \"kubernetes.io/projected/178e5392-5bb7-473d-a383-dcb9f9079b9c-kube-api-access-rnrz9\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312089 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312107 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/178e5392-5bb7-473d-a383-dcb9f9079b9c-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312125 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312174 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/178e5392-5bb7-473d-a383-dcb9f9079b9c-config-out\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312213 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312241 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312264 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-config\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312284 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312306 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312328 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312357 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-web-config\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.312385 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/178e5392-5bb7-473d-a383-dcb9f9079b9c-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.316601 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.316622 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-metrics-client-ca\" 
(UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.318337 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.318388 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.319090 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/178e5392-5bb7-473d-a383-dcb9f9079b9c-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.319357 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.323354 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/178e5392-5bb7-473d-a383-dcb9f9079b9c-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.328990 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.330063 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-config\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.334590 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.318275 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: 
\"kubernetes.io/projected/178e5392-5bb7-473d-a383-dcb9f9079b9c-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.334833 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnrz9\" (UniqueName: \"kubernetes.io/projected/178e5392-5bb7-473d-a383-dcb9f9079b9c-kube-api-access-rnrz9\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.335391 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/178e5392-5bb7-473d-a383-dcb9f9079b9c-config-out\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.335488 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.335958 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.336329 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.336397 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-web-config\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.337989 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/178e5392-5bb7-473d-a383-dcb9f9079b9c-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"178e5392-5bb7-473d-a383-dcb9f9079b9c\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.462496 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.932082 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6fccdf9d4b-hw5kb" event={"ID":"0da4df77-1cb0-4dae-b1c6-d1adf88bf205","Type":"ContainerStarted","Data":"6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74"} Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.932478 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6fccdf9d4b-hw5kb" event={"ID":"0da4df77-1cb0-4dae-b1c6-d1adf88bf205","Type":"ContainerStarted","Data":"e3f70f0dc97a8f33399a54c32f62617800be9f18455a4b8a3e54018d9882a42e"} Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.937573 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" event={"ID":"bbd6b497-fe17-45e9-a5ef-2f4e1101389a","Type":"ContainerStarted","Data":"7e4dac9d47c8733359f9a0b4ae98b25d58125b52bbfad87fa7585b4ed84debee"} Dec 01 18:36:50 crc kubenswrapper[4935]: I1201 18:36:50.958314 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6fccdf9d4b-hw5kb" podStartSLOduration=2.958295383 podStartE2EDuration="2.958295383s" podCreationTimestamp="2025-12-01 18:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:36:50.951186749 +0000 UTC m=+424.972816008" watchObservedRunningTime="2025-12-01 18:36:50.958295383 +0000 UTC m=+424.979924632" Dec 01 18:36:51 crc kubenswrapper[4935]: I1201 18:36:51.048497 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/monitoring-plugin-8669468879-5npfp"] Dec 01 18:36:51 crc kubenswrapper[4935]: I1201 18:36:51.116359 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/metrics-server-67dccdf457-pfhr5"] Dec 01 18:36:51 crc kubenswrapper[4935]: W1201 18:36:51.145256 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd552028c_c467_4640_a164_66283cc6ba3b.slice/crio-df83055f3b80cd0f46d3572972661291d26b3b59125ec49612be18e13ed46dcb WatchSource:0}: Error finding container df83055f3b80cd0f46d3572972661291d26b3b59125ec49612be18e13ed46dcb: Status 404 returned error can't find the container with id df83055f3b80cd0f46d3572972661291d26b3b59125ec49612be18e13ed46dcb Dec 01 18:36:51 crc kubenswrapper[4935]: I1201 18:36:51.212896 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 01 18:36:51 crc kubenswrapper[4935]: W1201 18:36:51.219843 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod178e5392_5bb7_473d_a383_dcb9f9079b9c.slice/crio-85ede8d4bfa123faf6dc43de12324f40ad3da5ca406e9ac008d0ff618923676f WatchSource:0}: Error finding container 85ede8d4bfa123faf6dc43de12324f40ad3da5ca406e9ac008d0ff618923676f: Status 404 returned error can't find the container with id 85ede8d4bfa123faf6dc43de12324f40ad3da5ca406e9ac008d0ff618923676f Dec 01 18:36:51 crc kubenswrapper[4935]: I1201 18:36:51.945367 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" event={"ID":"d552028c-c467-4640-a164-66283cc6ba3b","Type":"ContainerStarted","Data":"df83055f3b80cd0f46d3572972661291d26b3b59125ec49612be18e13ed46dcb"} Dec 01 18:36:51 
crc kubenswrapper[4935]: I1201 18:36:51.946814 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" event={"ID":"fb8a9aa4-dfb3-44a6-ac60-1caa46add655","Type":"ContainerStarted","Data":"a6bbce72122e41bf8f65101df86228f00177f0771194c1c052761d9805225e6c"} Dec 01 18:36:51 crc kubenswrapper[4935]: I1201 18:36:51.949334 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" event={"ID":"bbd6b497-fe17-45e9-a5ef-2f4e1101389a","Type":"ContainerStarted","Data":"93edf68fa891cade14ba4e445994997c4e480ed6e128df945dad8ceda9d69b6f"} Dec 01 18:36:51 crc kubenswrapper[4935]: I1201 18:36:51.949358 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" event={"ID":"bbd6b497-fe17-45e9-a5ef-2f4e1101389a","Type":"ContainerStarted","Data":"e5c2af3bf453216d7ddb1ba2d214d5113e8e4926717f3098e7dada950f63f919"} Dec 01 18:36:51 crc kubenswrapper[4935]: I1201 18:36:51.950629 4935 generic.go:334] "Generic (PLEG): container finished" podID="178e5392-5bb7-473d-a383-dcb9f9079b9c" containerID="fdbb6d498f57dc98ab49f6489f334cdfc16ab36db3199824bd4ca1b02f33d031" exitCode=0 Dec 01 18:36:51 crc kubenswrapper[4935]: I1201 18:36:51.950724 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"178e5392-5bb7-473d-a383-dcb9f9079b9c","Type":"ContainerDied","Data":"fdbb6d498f57dc98ab49f6489f334cdfc16ab36db3199824bd4ca1b02f33d031"} Dec 01 18:36:51 crc kubenswrapper[4935]: I1201 18:36:51.950796 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"178e5392-5bb7-473d-a383-dcb9f9079b9c","Type":"ContainerStarted","Data":"85ede8d4bfa123faf6dc43de12324f40ad3da5ca406e9ac008d0ff618923676f"} Dec 01 18:36:52 crc kubenswrapper[4935]: I1201 18:36:52.957219 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"ee68ee3a-b60e-46d1-ada4-9b6db954c33e","Type":"ContainerStarted","Data":"1054ef77b5761fc0fe392ea7a3b593492fc9c88657da95d1dc740e67077c68b6"} Dec 01 18:36:52 crc kubenswrapper[4935]: I1201 18:36:52.957519 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"ee68ee3a-b60e-46d1-ada4-9b6db954c33e","Type":"ContainerStarted","Data":"443651141b3f3ce6353242cb3bf01750a4f8f087f7717c4f5ba171c44ff312f6"} Dec 01 18:36:52 crc kubenswrapper[4935]: I1201 18:36:52.957529 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"ee68ee3a-b60e-46d1-ada4-9b6db954c33e","Type":"ContainerStarted","Data":"2dfb4bad6c4b48820f963e683c8eee8343cf622204095b311fa74915a37730c1"} Dec 01 18:36:53 crc kubenswrapper[4935]: I1201 18:36:53.970454 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"ee68ee3a-b60e-46d1-ada4-9b6db954c33e","Type":"ContainerStarted","Data":"dd9ab48de4200acec6bdf8a87625975d1f3e90c20c70e872342a95a5b04dce2d"} Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.979566 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"ee68ee3a-b60e-46d1-ada4-9b6db954c33e","Type":"ContainerStarted","Data":"193a3bdd37b83bcff2779e7336d1e783d13d30bcba34175433ec4d38cfe54b5b"} Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.979918 4935 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"ee68ee3a-b60e-46d1-ada4-9b6db954c33e","Type":"ContainerStarted","Data":"a81ac966b7f47a744294baf2a24416ec05051c3316832b0b989f690a953bf87e"} Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.982975 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" event={"ID":"bbd6b497-fe17-45e9-a5ef-2f4e1101389a","Type":"ContainerStarted","Data":"2fc0e865e91d263a7ef2af2d4ba8f9958cbd1a72577828637ad1989720c4a483"} Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.983041 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" event={"ID":"bbd6b497-fe17-45e9-a5ef-2f4e1101389a","Type":"ContainerStarted","Data":"59af827e5f686c99526419b87d88fca605dc81347cd81eaa3a349b0ba5d2dc68"} Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.983062 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" event={"ID":"bbd6b497-fe17-45e9-a5ef-2f4e1101389a","Type":"ContainerStarted","Data":"769bb8675898bcddd44f250b87223eb20b6567d97ea79de53a9d8260971af33a"} Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.983175 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.984677 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" event={"ID":"d552028c-c467-4640-a164-66283cc6ba3b","Type":"ContainerStarted","Data":"33834418ed228a0d35112bea7e99d4de816fcb5f3fac8f8a453ed64491af77f4"} Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.986100 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" event={"ID":"fb8a9aa4-dfb3-44a6-ac60-1caa46add655","Type":"ContainerStarted","Data":"e18b7b81664fe9dda73608d41bafdf13f4bd0320017858025470e260ba6576c8"} Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.986490 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" Dec 01 18:36:54 crc kubenswrapper[4935]: I1201 18:36:54.995065 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" Dec 01 18:36:55 crc kubenswrapper[4935]: I1201 18:36:55.022857 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/alertmanager-main-0" podStartSLOduration=4.488374952 podStartE2EDuration="11.022835264s" podCreationTimestamp="2025-12-01 18:36:44 +0000 UTC" firstStartedPulling="2025-12-01 18:36:45.698914626 +0000 UTC m=+419.720543895" lastFinishedPulling="2025-12-01 18:36:52.233374918 +0000 UTC m=+426.255004207" observedRunningTime="2025-12-01 18:36:55.018312391 +0000 UTC m=+429.039941690" watchObservedRunningTime="2025-12-01 18:36:55.022835264 +0000 UTC m=+429.044464523" Dec 01 18:36:55 crc kubenswrapper[4935]: I1201 18:36:55.051705 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/monitoring-plugin-8669468879-5npfp" podStartSLOduration=3.212288501 podStartE2EDuration="6.051673926s" podCreationTimestamp="2025-12-01 18:36:49 +0000 UTC" firstStartedPulling="2025-12-01 18:36:51.060413113 +0000 UTC m=+425.082042372" lastFinishedPulling="2025-12-01 18:36:53.899798528 +0000 UTC m=+427.921427797" 
observedRunningTime="2025-12-01 18:36:55.034914136 +0000 UTC m=+429.056543445" watchObservedRunningTime="2025-12-01 18:36:55.051673926 +0000 UTC m=+429.073303215" Dec 01 18:36:55 crc kubenswrapper[4935]: I1201 18:36:55.074618 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" podStartSLOduration=3.246616588 podStartE2EDuration="10.074584661s" podCreationTimestamp="2025-12-01 18:36:45 +0000 UTC" firstStartedPulling="2025-12-01 18:36:47.049366724 +0000 UTC m=+421.070996003" lastFinishedPulling="2025-12-01 18:36:53.877334777 +0000 UTC m=+427.898964076" observedRunningTime="2025-12-01 18:36:55.074353694 +0000 UTC m=+429.095982973" watchObservedRunningTime="2025-12-01 18:36:55.074584661 +0000 UTC m=+429.096213960" Dec 01 18:36:55 crc kubenswrapper[4935]: I1201 18:36:55.099113 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" podStartSLOduration=3.345822544 podStartE2EDuration="6.099082635s" podCreationTimestamp="2025-12-01 18:36:49 +0000 UTC" firstStartedPulling="2025-12-01 18:36:51.147238969 +0000 UTC m=+425.168868228" lastFinishedPulling="2025-12-01 18:36:53.90049905 +0000 UTC m=+427.922128319" observedRunningTime="2025-12-01 18:36:55.090412341 +0000 UTC m=+429.112041620" watchObservedRunningTime="2025-12-01 18:36:55.099082635 +0000 UTC m=+429.120711904" Dec 01 18:36:56 crc kubenswrapper[4935]: I1201 18:36:56.009269 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/thanos-querier-55668896bc-ppb2l" Dec 01 18:36:59 crc kubenswrapper[4935]: I1201 18:36:59.142835 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:59 crc kubenswrapper[4935]: I1201 18:36:59.143242 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:36:59 crc kubenswrapper[4935]: I1201 18:36:59.147737 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:37:00 crc kubenswrapper[4935]: I1201 18:37:00.024686 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"178e5392-5bb7-473d-a383-dcb9f9079b9c","Type":"ContainerStarted","Data":"3a146148dbd5e611cc31e16bb704169b5a13d119e40e8356f15993fa325beaca"} Dec 01 18:37:00 crc kubenswrapper[4935]: I1201 18:37:00.025341 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"178e5392-5bb7-473d-a383-dcb9f9079b9c","Type":"ContainerStarted","Data":"2b5d2577c2791518a61755c8a8ed8e04b1518c3107e396516b2ca477c8541fdc"} Dec 01 18:37:00 crc kubenswrapper[4935]: I1201 18:37:00.029123 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:37:00 crc kubenswrapper[4935]: I1201 18:37:00.079032 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-dbvg7"] Dec 01 18:37:01 crc kubenswrapper[4935]: I1201 18:37:01.039323 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"178e5392-5bb7-473d-a383-dcb9f9079b9c","Type":"ContainerStarted","Data":"66bc094b3736cc69714e29cb7a31c14908ba2021d56d215a4504d043b835949c"} Dec 01 18:37:01 crc kubenswrapper[4935]: I1201 18:37:01.039828 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"178e5392-5bb7-473d-a383-dcb9f9079b9c","Type":"ContainerStarted","Data":"8a3557e590c52b803b34fae93bda0610110a618276bd668103ab0b7eaf0b2345"} Dec 01 18:37:01 crc kubenswrapper[4935]: I1201 18:37:01.039860 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"178e5392-5bb7-473d-a383-dcb9f9079b9c","Type":"ContainerStarted","Data":"9dcc296b0f84ce187e745150b2dcbd7c82c2bdeb14b544d776b268339aa19fe0"} Dec 01 18:37:01 crc kubenswrapper[4935]: I1201 18:37:01.039879 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"178e5392-5bb7-473d-a383-dcb9f9079b9c","Type":"ContainerStarted","Data":"2c4884b34265f59b36370dd1b941876a1dbfa77683ec7128938972663d191d32"} Dec 01 18:37:01 crc kubenswrapper[4935]: I1201 18:37:01.088549 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-k8s-0" podStartSLOduration=3.499653334 podStartE2EDuration="11.088525912s" podCreationTimestamp="2025-12-01 18:36:50 +0000 UTC" firstStartedPulling="2025-12-01 18:36:51.95233768 +0000 UTC m=+425.973966979" lastFinishedPulling="2025-12-01 18:36:59.541210298 +0000 UTC m=+433.562839557" observedRunningTime="2025-12-01 18:37:01.087945584 +0000 UTC m=+435.109574933" watchObservedRunningTime="2025-12-01 18:37:01.088525912 +0000 UTC m=+435.110155201" Dec 01 18:37:05 crc kubenswrapper[4935]: I1201 18:37:05.463975 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:37:09 crc kubenswrapper[4935]: I1201 18:37:09.493603 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:37:09 crc kubenswrapper[4935]: I1201 18:37:09.493970 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:37:25 crc kubenswrapper[4935]: I1201 18:37:25.147830 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-dbvg7" podUID="6881ae5d-31b3-4749-bd1a-db65599d48d3" containerName="console" containerID="cri-o://b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe" gracePeriod=15 Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.077972 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dbvg7_6881ae5d-31b3-4749-bd1a-db65599d48d3/console/0.log" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.078042 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.172529 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmhvv\" (UniqueName: \"kubernetes.io/projected/6881ae5d-31b3-4749-bd1a-db65599d48d3-kube-api-access-nmhvv\") pod \"6881ae5d-31b3-4749-bd1a-db65599d48d3\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.172610 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-oauth-config\") pod \"6881ae5d-31b3-4749-bd1a-db65599d48d3\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.172668 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-trusted-ca-bundle\") pod \"6881ae5d-31b3-4749-bd1a-db65599d48d3\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.172687 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-serving-cert\") pod \"6881ae5d-31b3-4749-bd1a-db65599d48d3\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.172764 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-oauth-serving-cert\") pod \"6881ae5d-31b3-4749-bd1a-db65599d48d3\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.172789 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-config\") pod \"6881ae5d-31b3-4749-bd1a-db65599d48d3\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.172859 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-service-ca\") pod \"6881ae5d-31b3-4749-bd1a-db65599d48d3\" (UID: \"6881ae5d-31b3-4749-bd1a-db65599d48d3\") " Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.173656 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-service-ca" (OuterVolumeSpecName: "service-ca") pod "6881ae5d-31b3-4749-bd1a-db65599d48d3" (UID: "6881ae5d-31b3-4749-bd1a-db65599d48d3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.174115 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6881ae5d-31b3-4749-bd1a-db65599d48d3" (UID: "6881ae5d-31b3-4749-bd1a-db65599d48d3"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.174470 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-config" (OuterVolumeSpecName: "console-config") pod "6881ae5d-31b3-4749-bd1a-db65599d48d3" (UID: "6881ae5d-31b3-4749-bd1a-db65599d48d3"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.174450 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "6881ae5d-31b3-4749-bd1a-db65599d48d3" (UID: "6881ae5d-31b3-4749-bd1a-db65599d48d3"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.179742 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "6881ae5d-31b3-4749-bd1a-db65599d48d3" (UID: "6881ae5d-31b3-4749-bd1a-db65599d48d3"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.181697 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6881ae5d-31b3-4749-bd1a-db65599d48d3-kube-api-access-nmhvv" (OuterVolumeSpecName: "kube-api-access-nmhvv") pod "6881ae5d-31b3-4749-bd1a-db65599d48d3" (UID: "6881ae5d-31b3-4749-bd1a-db65599d48d3"). InnerVolumeSpecName "kube-api-access-nmhvv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.182800 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "6881ae5d-31b3-4749-bd1a-db65599d48d3" (UID: "6881ae5d-31b3-4749-bd1a-db65599d48d3"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.245318 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dbvg7_6881ae5d-31b3-4749-bd1a-db65599d48d3/console/0.log" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.245573 4935 generic.go:334] "Generic (PLEG): container finished" podID="6881ae5d-31b3-4749-bd1a-db65599d48d3" containerID="b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe" exitCode=2 Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.245650 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dbvg7" event={"ID":"6881ae5d-31b3-4749-bd1a-db65599d48d3","Type":"ContainerDied","Data":"b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe"} Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.245886 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dbvg7" event={"ID":"6881ae5d-31b3-4749-bd1a-db65599d48d3","Type":"ContainerDied","Data":"a2d8b817d83f17d308a90a14a31de72475bdd9d2ecde24019ff758253bf0b222"} Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.245677 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dbvg7" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.245931 4935 scope.go:117] "RemoveContainer" containerID="b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.274442 4935 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.274679 4935 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.274803 4935 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.274938 4935 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.275046 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmhvv\" (UniqueName: \"kubernetes.io/projected/6881ae5d-31b3-4749-bd1a-db65599d48d3-kube-api-access-nmhvv\") on node \"crc\" DevicePath \"\"" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.275216 4935 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6881ae5d-31b3-4749-bd1a-db65599d48d3-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.275379 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6881ae5d-31b3-4749-bd1a-db65599d48d3-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.290482 4935 scope.go:117] "RemoveContainer" containerID="b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe" Dec 01 18:37:26 crc kubenswrapper[4935]: E1201 18:37:26.291315 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe\": container with ID starting with b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe not found: ID does not exist" containerID="b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.291366 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe"} err="failed to get container status \"b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe\": rpc error: code = NotFound desc = could not find container \"b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe\": container with ID starting with b60f0ad2276aa44c0548c1ddb4f2ab6cf35726a57a9cd2077bcfc7ebebc249fe not found: ID does not exist" Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.300120 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-console/console-f9d7485db-dbvg7"] Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.307944 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-dbvg7"] Dec 01 18:37:26 crc kubenswrapper[4935]: I1201 18:37:26.524400 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6881ae5d-31b3-4749-bd1a-db65599d48d3" path="/var/lib/kubelet/pods/6881ae5d-31b3-4749-bd1a-db65599d48d3/volumes" Dec 01 18:37:29 crc kubenswrapper[4935]: I1201 18:37:29.502672 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:37:29 crc kubenswrapper[4935]: I1201 18:37:29.510879 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" Dec 01 18:37:50 crc kubenswrapper[4935]: I1201 18:37:50.464445 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:37:50 crc kubenswrapper[4935]: I1201 18:37:50.517443 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:37:51 crc kubenswrapper[4935]: I1201 18:37:51.470969 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/prometheus-k8s-0" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.195110 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7656789b46-jfxcx"] Dec 01 18:38:25 crc kubenswrapper[4935]: E1201 18:38:25.196449 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6881ae5d-31b3-4749-bd1a-db65599d48d3" containerName="console" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.196484 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6881ae5d-31b3-4749-bd1a-db65599d48d3" containerName="console" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.196755 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="6881ae5d-31b3-4749-bd1a-db65599d48d3" containerName="console" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.197821 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.214434 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7656789b46-jfxcx"] Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.300473 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-serving-cert\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.300583 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lshcp\" (UniqueName: \"kubernetes.io/projected/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-kube-api-access-lshcp\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.300611 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-service-ca\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.300634 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-oauth-serving-cert\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.300849 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-trusted-ca-bundle\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.300908 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-oauth-config\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.300953 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-config\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.402294 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-trusted-ca-bundle\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc 
kubenswrapper[4935]: I1201 18:38:25.402337 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-oauth-config\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.402362 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-config\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.402403 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-serving-cert\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.402455 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lshcp\" (UniqueName: \"kubernetes.io/projected/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-kube-api-access-lshcp\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.402475 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-service-ca\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.402494 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-oauth-serving-cert\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.403696 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-trusted-ca-bundle\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.403904 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-oauth-serving-cert\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.403915 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-service-ca\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 
18:38:25.404565 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-config\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.409529 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-serving-cert\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.409750 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-oauth-config\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.432740 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lshcp\" (UniqueName: \"kubernetes.io/projected/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-kube-api-access-lshcp\") pod \"console-7656789b46-jfxcx\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:25 crc kubenswrapper[4935]: I1201 18:38:25.531643 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:26 crc kubenswrapper[4935]: I1201 18:38:26.006584 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7656789b46-jfxcx"] Dec 01 18:38:26 crc kubenswrapper[4935]: W1201 18:38:26.014265 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f2c0d54_c995_41fe_8ffb_36376e7aed2a.slice/crio-af1f926af91b610a67517d1fd1022e06e4440ff63007fc9ee8e4e1d30e732494 WatchSource:0}: Error finding container af1f926af91b610a67517d1fd1022e06e4440ff63007fc9ee8e4e1d30e732494: Status 404 returned error can't find the container with id af1f926af91b610a67517d1fd1022e06e4440ff63007fc9ee8e4e1d30e732494 Dec 01 18:38:26 crc kubenswrapper[4935]: I1201 18:38:26.710137 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7656789b46-jfxcx" event={"ID":"6f2c0d54-c995-41fe-8ffb-36376e7aed2a","Type":"ContainerStarted","Data":"5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91"} Dec 01 18:38:26 crc kubenswrapper[4935]: I1201 18:38:26.710571 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7656789b46-jfxcx" event={"ID":"6f2c0d54-c995-41fe-8ffb-36376e7aed2a","Type":"ContainerStarted","Data":"af1f926af91b610a67517d1fd1022e06e4440ff63007fc9ee8e4e1d30e732494"} Dec 01 18:38:26 crc kubenswrapper[4935]: I1201 18:38:26.737421 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7656789b46-jfxcx" podStartSLOduration=1.737394445 podStartE2EDuration="1.737394445s" podCreationTimestamp="2025-12-01 18:38:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:38:26.73563412 +0000 UTC m=+520.757263419" 
watchObservedRunningTime="2025-12-01 18:38:26.737394445 +0000 UTC m=+520.759023744" Dec 01 18:38:35 crc kubenswrapper[4935]: I1201 18:38:35.532126 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:35 crc kubenswrapper[4935]: I1201 18:38:35.533046 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:35 crc kubenswrapper[4935]: I1201 18:38:35.542488 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:35 crc kubenswrapper[4935]: I1201 18:38:35.808325 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:38:35 crc kubenswrapper[4935]: I1201 18:38:35.900472 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-6fccdf9d4b-hw5kb"] Dec 01 18:38:54 crc kubenswrapper[4935]: I1201 18:38:54.345814 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:38:54 crc kubenswrapper[4935]: I1201 18:38:54.346572 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:39:00 crc kubenswrapper[4935]: I1201 18:39:00.968411 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-6fccdf9d4b-hw5kb" podUID="0da4df77-1cb0-4dae-b1c6-d1adf88bf205" containerName="console" containerID="cri-o://6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74" gracePeriod=15 Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.348110 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-6fccdf9d4b-hw5kb_0da4df77-1cb0-4dae-b1c6-d1adf88bf205/console/0.log" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.348892 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.469655 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-oauth-serving-cert\") pod \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.469720 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-config\") pod \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.469803 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-service-ca\") pod \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.469840 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-trusted-ca-bundle\") pod \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.471049 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-service-ca" (OuterVolumeSpecName: "service-ca") pod "0da4df77-1cb0-4dae-b1c6-d1adf88bf205" (UID: "0da4df77-1cb0-4dae-b1c6-d1adf88bf205"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.471136 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-config" (OuterVolumeSpecName: "console-config") pod "0da4df77-1cb0-4dae-b1c6-d1adf88bf205" (UID: "0da4df77-1cb0-4dae-b1c6-d1adf88bf205"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.471181 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-serving-cert\") pod \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.471088 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "0da4df77-1cb0-4dae-b1c6-d1adf88bf205" (UID: "0da4df77-1cb0-4dae-b1c6-d1adf88bf205"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.471356 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "0da4df77-1cb0-4dae-b1c6-d1adf88bf205" (UID: "0da4df77-1cb0-4dae-b1c6-d1adf88bf205"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.471381 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-oauth-config\") pod \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.471568 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zkh8\" (UniqueName: \"kubernetes.io/projected/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-kube-api-access-8zkh8\") pod \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\" (UID: \"0da4df77-1cb0-4dae-b1c6-d1adf88bf205\") " Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.472073 4935 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.472099 4935 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.472117 4935 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.472137 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.478370 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "0da4df77-1cb0-4dae-b1c6-d1adf88bf205" (UID: "0da4df77-1cb0-4dae-b1c6-d1adf88bf205"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.478428 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "0da4df77-1cb0-4dae-b1c6-d1adf88bf205" (UID: "0da4df77-1cb0-4dae-b1c6-d1adf88bf205"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.478486 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-kube-api-access-8zkh8" (OuterVolumeSpecName: "kube-api-access-8zkh8") pod "0da4df77-1cb0-4dae-b1c6-d1adf88bf205" (UID: "0da4df77-1cb0-4dae-b1c6-d1adf88bf205"). InnerVolumeSpecName "kube-api-access-8zkh8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.574255 4935 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.574305 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zkh8\" (UniqueName: \"kubernetes.io/projected/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-kube-api-access-8zkh8\") on node \"crc\" DevicePath \"\"" Dec 01 18:39:01 crc kubenswrapper[4935]: I1201 18:39:01.574322 4935 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0da4df77-1cb0-4dae-b1c6-d1adf88bf205-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.017820 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-6fccdf9d4b-hw5kb_0da4df77-1cb0-4dae-b1c6-d1adf88bf205/console/0.log" Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.017902 4935 generic.go:334] "Generic (PLEG): container finished" podID="0da4df77-1cb0-4dae-b1c6-d1adf88bf205" containerID="6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74" exitCode=2 Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.017951 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6fccdf9d4b-hw5kb" event={"ID":"0da4df77-1cb0-4dae-b1c6-d1adf88bf205","Type":"ContainerDied","Data":"6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74"} Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.018003 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6fccdf9d4b-hw5kb" event={"ID":"0da4df77-1cb0-4dae-b1c6-d1adf88bf205","Type":"ContainerDied","Data":"e3f70f0dc97a8f33399a54c32f62617800be9f18455a4b8a3e54018d9882a42e"} Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.018018 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6fccdf9d4b-hw5kb" Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.018049 4935 scope.go:117] "RemoveContainer" containerID="6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74" Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.060326 4935 scope.go:117] "RemoveContainer" containerID="6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74" Dec 01 18:39:02 crc kubenswrapper[4935]: E1201 18:39:02.062540 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74\": container with ID starting with 6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74 not found: ID does not exist" containerID="6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74" Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.062613 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74"} err="failed to get container status \"6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74\": rpc error: code = NotFound desc = could not find container \"6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74\": container with ID starting with 6f58a44279259ca501648d0885fe48a435d35e1d9c797b6457b81ef799874d74 not found: ID does not exist" Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.074577 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-6fccdf9d4b-hw5kb"] Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.084976 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-6fccdf9d4b-hw5kb"] Dec 01 18:39:02 crc kubenswrapper[4935]: I1201 18:39:02.521766 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0da4df77-1cb0-4dae-b1c6-d1adf88bf205" path="/var/lib/kubelet/pods/0da4df77-1cb0-4dae-b1c6-d1adf88bf205/volumes" Dec 01 18:39:24 crc kubenswrapper[4935]: I1201 18:39:24.346204 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:39:24 crc kubenswrapper[4935]: I1201 18:39:24.346879 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:39:54 crc kubenswrapper[4935]: I1201 18:39:54.347376 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:39:54 crc kubenswrapper[4935]: I1201 18:39:54.348014 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Dec 01 18:39:54 crc kubenswrapper[4935]: I1201 18:39:54.348086 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:39:54 crc kubenswrapper[4935]: I1201 18:39:54.348917 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b347e6f589fbccdde8049b52c2b8f25c113125fe0e4295d71410044e8cbbc0ae"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 18:39:54 crc kubenswrapper[4935]: I1201 18:39:54.349006 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://b347e6f589fbccdde8049b52c2b8f25c113125fe0e4295d71410044e8cbbc0ae" gracePeriod=600 Dec 01 18:39:55 crc kubenswrapper[4935]: I1201 18:39:55.429200 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="b347e6f589fbccdde8049b52c2b8f25c113125fe0e4295d71410044e8cbbc0ae" exitCode=0 Dec 01 18:39:55 crc kubenswrapper[4935]: I1201 18:39:55.429298 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"b347e6f589fbccdde8049b52c2b8f25c113125fe0e4295d71410044e8cbbc0ae"} Dec 01 18:39:55 crc kubenswrapper[4935]: I1201 18:39:55.430419 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"f03453b0bfa80f20c0452bdf75f21c17981d4d486ecd0aa51da07478cdd727f3"} Dec 01 18:39:55 crc kubenswrapper[4935]: I1201 18:39:55.430482 4935 scope.go:117] "RemoveContainer" containerID="ba70f74e54e1786deaed12104c295d7b917d7f0c9ecd296020b6c7c70c481193" Dec 01 18:41:48 crc kubenswrapper[4935]: I1201 18:41:48.013596 4935 scope.go:117] "RemoveContainer" containerID="a9212783fedbba4d5fce7ab52adaa4fd072fb6f3c11ed92ad3ed535a012ae97b" Dec 01 18:41:54 crc kubenswrapper[4935]: I1201 18:41:54.346277 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:41:54 crc kubenswrapper[4935]: I1201 18:41:54.346983 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:42:24 crc kubenswrapper[4935]: I1201 18:42:24.345895 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:42:24 crc kubenswrapper[4935]: I1201 18:42:24.346425 4935 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:42:26 crc kubenswrapper[4935]: I1201 18:42:26.595893 4935 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 01 18:42:27 crc kubenswrapper[4935]: I1201 18:42:27.822945 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k46vq"] Dec 01 18:42:27 crc kubenswrapper[4935]: E1201 18:42:27.823266 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da4df77-1cb0-4dae-b1c6-d1adf88bf205" containerName="console" Dec 01 18:42:27 crc kubenswrapper[4935]: I1201 18:42:27.823281 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da4df77-1cb0-4dae-b1c6-d1adf88bf205" containerName="console" Dec 01 18:42:27 crc kubenswrapper[4935]: I1201 18:42:27.823434 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="0da4df77-1cb0-4dae-b1c6-d1adf88bf205" containerName="console" Dec 01 18:42:27 crc kubenswrapper[4935]: I1201 18:42:27.824381 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:27 crc kubenswrapper[4935]: I1201 18:42:27.851030 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k46vq"] Dec 01 18:42:27 crc kubenswrapper[4935]: I1201 18:42:27.980273 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-utilities\") pod \"redhat-marketplace-k46vq\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:27 crc kubenswrapper[4935]: I1201 18:42:27.980408 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzg7r\" (UniqueName: \"kubernetes.io/projected/42d12fb1-3fcf-4043-9d62-c6245a47af0e-kube-api-access-rzg7r\") pod \"redhat-marketplace-k46vq\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:27 crc kubenswrapper[4935]: I1201 18:42:27.980431 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-catalog-content\") pod \"redhat-marketplace-k46vq\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:28 crc kubenswrapper[4935]: I1201 18:42:28.082262 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzg7r\" (UniqueName: \"kubernetes.io/projected/42d12fb1-3fcf-4043-9d62-c6245a47af0e-kube-api-access-rzg7r\") pod \"redhat-marketplace-k46vq\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:28 crc kubenswrapper[4935]: I1201 18:42:28.082303 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-catalog-content\") pod \"redhat-marketplace-k46vq\" (UID: 
\"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:28 crc kubenswrapper[4935]: I1201 18:42:28.082374 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-utilities\") pod \"redhat-marketplace-k46vq\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:28 crc kubenswrapper[4935]: I1201 18:42:28.082842 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-utilities\") pod \"redhat-marketplace-k46vq\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:28 crc kubenswrapper[4935]: I1201 18:42:28.082970 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-catalog-content\") pod \"redhat-marketplace-k46vq\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:28 crc kubenswrapper[4935]: I1201 18:42:28.119003 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzg7r\" (UniqueName: \"kubernetes.io/projected/42d12fb1-3fcf-4043-9d62-c6245a47af0e-kube-api-access-rzg7r\") pod \"redhat-marketplace-k46vq\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:28 crc kubenswrapper[4935]: I1201 18:42:28.150525 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:28 crc kubenswrapper[4935]: I1201 18:42:28.606057 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k46vq"] Dec 01 18:42:29 crc kubenswrapper[4935]: I1201 18:42:29.588224 4935 generic.go:334] "Generic (PLEG): container finished" podID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerID="22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f" exitCode=0 Dec 01 18:42:29 crc kubenswrapper[4935]: I1201 18:42:29.588267 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k46vq" event={"ID":"42d12fb1-3fcf-4043-9d62-c6245a47af0e","Type":"ContainerDied","Data":"22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f"} Dec 01 18:42:29 crc kubenswrapper[4935]: I1201 18:42:29.588292 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k46vq" event={"ID":"42d12fb1-3fcf-4043-9d62-c6245a47af0e","Type":"ContainerStarted","Data":"e97d225a9d8d73165a331e633ba2c5d665e00bb8b42954567030ff169b516a24"} Dec 01 18:42:29 crc kubenswrapper[4935]: I1201 18:42:29.591266 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 18:42:31 crc kubenswrapper[4935]: I1201 18:42:31.610321 4935 generic.go:334] "Generic (PLEG): container finished" podID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerID="ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df" exitCode=0 Dec 01 18:42:31 crc kubenswrapper[4935]: I1201 18:42:31.610405 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k46vq" 
event={"ID":"42d12fb1-3fcf-4043-9d62-c6245a47af0e","Type":"ContainerDied","Data":"ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df"} Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.608302 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rmmnw"] Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.609730 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.625042 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rmmnw"] Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.743376 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-utilities\") pod \"certified-operators-rmmnw\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.743478 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-catalog-content\") pod \"certified-operators-rmmnw\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.743515 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvkfj\" (UniqueName: \"kubernetes.io/projected/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-kube-api-access-pvkfj\") pod \"certified-operators-rmmnw\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.845088 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-catalog-content\") pod \"certified-operators-rmmnw\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.845468 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvkfj\" (UniqueName: \"kubernetes.io/projected/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-kube-api-access-pvkfj\") pod \"certified-operators-rmmnw\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.845551 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-utilities\") pod \"certified-operators-rmmnw\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.845602 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-catalog-content\") pod \"certified-operators-rmmnw\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: 
I1201 18:42:32.845988 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-utilities\") pod \"certified-operators-rmmnw\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.872559 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvkfj\" (UniqueName: \"kubernetes.io/projected/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-kube-api-access-pvkfj\") pod \"certified-operators-rmmnw\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:32 crc kubenswrapper[4935]: I1201 18:42:32.943464 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:33 crc kubenswrapper[4935]: I1201 18:42:33.198365 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rmmnw"] Dec 01 18:42:33 crc kubenswrapper[4935]: I1201 18:42:33.625391 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k46vq" event={"ID":"42d12fb1-3fcf-4043-9d62-c6245a47af0e","Type":"ContainerStarted","Data":"7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25"} Dec 01 18:42:33 crc kubenswrapper[4935]: I1201 18:42:33.626869 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmmnw" event={"ID":"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1","Type":"ContainerStarted","Data":"4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262"} Dec 01 18:42:33 crc kubenswrapper[4935]: I1201 18:42:33.626917 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmmnw" event={"ID":"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1","Type":"ContainerStarted","Data":"a6be85e2af3c63d4d5928a159e0315c0e8042c70af726bc0f042afaf9ce360a7"} Dec 01 18:42:33 crc kubenswrapper[4935]: I1201 18:42:33.650306 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k46vq" podStartSLOduration=3.562733201 podStartE2EDuration="6.65028542s" podCreationTimestamp="2025-12-01 18:42:27 +0000 UTC" firstStartedPulling="2025-12-01 18:42:29.590965713 +0000 UTC m=+763.612594982" lastFinishedPulling="2025-12-01 18:42:32.678517902 +0000 UTC m=+766.700147201" observedRunningTime="2025-12-01 18:42:33.646678757 +0000 UTC m=+767.668308036" watchObservedRunningTime="2025-12-01 18:42:33.65028542 +0000 UTC m=+767.671914689" Dec 01 18:42:34 crc kubenswrapper[4935]: I1201 18:42:34.636821 4935 generic.go:334] "Generic (PLEG): container finished" podID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerID="4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262" exitCode=0 Dec 01 18:42:34 crc kubenswrapper[4935]: I1201 18:42:34.636907 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmmnw" event={"ID":"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1","Type":"ContainerDied","Data":"4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262"} Dec 01 18:42:36 crc kubenswrapper[4935]: I1201 18:42:36.653882 4935 generic.go:334] "Generic (PLEG): container finished" podID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerID="09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a" exitCode=0 Dec 01 18:42:36 crc 
kubenswrapper[4935]: I1201 18:42:36.653937 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmmnw" event={"ID":"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1","Type":"ContainerDied","Data":"09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a"} Dec 01 18:42:37 crc kubenswrapper[4935]: I1201 18:42:37.662863 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmmnw" event={"ID":"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1","Type":"ContainerStarted","Data":"5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997"} Dec 01 18:42:38 crc kubenswrapper[4935]: I1201 18:42:38.151224 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:38 crc kubenswrapper[4935]: I1201 18:42:38.151284 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:38 crc kubenswrapper[4935]: I1201 18:42:38.199520 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:38 crc kubenswrapper[4935]: I1201 18:42:38.219842 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rmmnw" podStartSLOduration=3.373930471 podStartE2EDuration="6.21981926s" podCreationTimestamp="2025-12-01 18:42:32 +0000 UTC" firstStartedPulling="2025-12-01 18:42:34.639364652 +0000 UTC m=+768.660993921" lastFinishedPulling="2025-12-01 18:42:37.485253421 +0000 UTC m=+771.506882710" observedRunningTime="2025-12-01 18:42:37.679001384 +0000 UTC m=+771.700630663" watchObservedRunningTime="2025-12-01 18:42:38.21981926 +0000 UTC m=+772.241448539" Dec 01 18:42:38 crc kubenswrapper[4935]: I1201 18:42:38.721243 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:41 crc kubenswrapper[4935]: I1201 18:42:41.584274 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k46vq"] Dec 01 18:42:41 crc kubenswrapper[4935]: I1201 18:42:41.584831 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k46vq" podUID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerName="registry-server" containerID="cri-o://7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25" gracePeriod=2 Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.466122 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.589962 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-utilities\") pod \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.590036 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-catalog-content\") pod \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.590110 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzg7r\" (UniqueName: \"kubernetes.io/projected/42d12fb1-3fcf-4043-9d62-c6245a47af0e-kube-api-access-rzg7r\") pod \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\" (UID: \"42d12fb1-3fcf-4043-9d62-c6245a47af0e\") " Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.591078 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-utilities" (OuterVolumeSpecName: "utilities") pod "42d12fb1-3fcf-4043-9d62-c6245a47af0e" (UID: "42d12fb1-3fcf-4043-9d62-c6245a47af0e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.597178 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42d12fb1-3fcf-4043-9d62-c6245a47af0e-kube-api-access-rzg7r" (OuterVolumeSpecName: "kube-api-access-rzg7r") pod "42d12fb1-3fcf-4043-9d62-c6245a47af0e" (UID: "42d12fb1-3fcf-4043-9d62-c6245a47af0e"). InnerVolumeSpecName "kube-api-access-rzg7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.610066 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "42d12fb1-3fcf-4043-9d62-c6245a47af0e" (UID: "42d12fb1-3fcf-4043-9d62-c6245a47af0e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.691138 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.691198 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42d12fb1-3fcf-4043-9d62-c6245a47af0e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.691210 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzg7r\" (UniqueName: \"kubernetes.io/projected/42d12fb1-3fcf-4043-9d62-c6245a47af0e-kube-api-access-rzg7r\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.699549 4935 generic.go:334] "Generic (PLEG): container finished" podID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerID="7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25" exitCode=0 Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.699607 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k46vq" event={"ID":"42d12fb1-3fcf-4043-9d62-c6245a47af0e","Type":"ContainerDied","Data":"7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25"} Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.699656 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k46vq" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.699748 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k46vq" event={"ID":"42d12fb1-3fcf-4043-9d62-c6245a47af0e","Type":"ContainerDied","Data":"e97d225a9d8d73165a331e633ba2c5d665e00bb8b42954567030ff169b516a24"} Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.699774 4935 scope.go:117] "RemoveContainer" containerID="7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.717725 4935 scope.go:117] "RemoveContainer" containerID="ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.731250 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k46vq"] Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.735260 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k46vq"] Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.766126 4935 scope.go:117] "RemoveContainer" containerID="22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.780243 4935 scope.go:117] "RemoveContainer" containerID="7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25" Dec 01 18:42:42 crc kubenswrapper[4935]: E1201 18:42:42.780697 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25\": container with ID starting with 7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25 not found: ID does not exist" containerID="7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.780728 4935 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25"} err="failed to get container status \"7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25\": rpc error: code = NotFound desc = could not find container \"7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25\": container with ID starting with 7e084a980bce01840d1585ab306ed445aba8c5c1267702a88262360227b12c25 not found: ID does not exist" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.781296 4935 scope.go:117] "RemoveContainer" containerID="ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df" Dec 01 18:42:42 crc kubenswrapper[4935]: E1201 18:42:42.781628 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df\": container with ID starting with ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df not found: ID does not exist" containerID="ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.781648 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df"} err="failed to get container status \"ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df\": rpc error: code = NotFound desc = could not find container \"ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df\": container with ID starting with ff483e9ec71df0fcfabf81fdd5ac4019ee112c2ff6d3fc22e4e9541476ee77df not found: ID does not exist" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.781663 4935 scope.go:117] "RemoveContainer" containerID="22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f" Dec 01 18:42:42 crc kubenswrapper[4935]: E1201 18:42:42.781934 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f\": container with ID starting with 22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f not found: ID does not exist" containerID="22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.781958 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f"} err="failed to get container status \"22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f\": rpc error: code = NotFound desc = could not find container \"22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f\": container with ID starting with 22127b2cd7ce5f12f4808c133fe13f6edc4df6268d87e8f24b1db2de8ee8c78f not found: ID does not exist" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.944410 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.944495 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:42 crc kubenswrapper[4935]: I1201 18:42:42.997636 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.477040 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk"] Dec 01 18:42:43 crc kubenswrapper[4935]: E1201 18:42:43.477717 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerName="extract-utilities" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.477733 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerName="extract-utilities" Dec 01 18:42:43 crc kubenswrapper[4935]: E1201 18:42:43.477750 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerName="registry-server" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.477758 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerName="registry-server" Dec 01 18:42:43 crc kubenswrapper[4935]: E1201 18:42:43.477770 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerName="extract-content" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.477779 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerName="extract-content" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.477922 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" containerName="registry-server" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.478973 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.485565 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.494733 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk"] Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.605725 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfrmt\" (UniqueName: \"kubernetes.io/projected/725a954a-2f06-4951-bfe5-c4db016352ca-kube-api-access-nfrmt\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.605891 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.605948 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-bundle\") pod 
\"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.707374 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.707867 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfrmt\" (UniqueName: \"kubernetes.io/projected/725a954a-2f06-4951-bfe5-c4db016352ca-kube-api-access-nfrmt\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.708013 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.708300 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.708872 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.744276 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfrmt\" (UniqueName: \"kubernetes.io/projected/725a954a-2f06-4951-bfe5-c4db016352ca-kube-api-access-nfrmt\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.791779 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:43 crc kubenswrapper[4935]: I1201 18:42:43.803539 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:44 crc kubenswrapper[4935]: I1201 18:42:44.231336 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk"] Dec 01 18:42:44 crc kubenswrapper[4935]: I1201 18:42:44.517502 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42d12fb1-3fcf-4043-9d62-c6245a47af0e" path="/var/lib/kubelet/pods/42d12fb1-3fcf-4043-9d62-c6245a47af0e/volumes" Dec 01 18:42:44 crc kubenswrapper[4935]: I1201 18:42:44.725947 4935 generic.go:334] "Generic (PLEG): container finished" podID="725a954a-2f06-4951-bfe5-c4db016352ca" containerID="92219d6591fe01936b730784e0be54bb8bd8be74de4b85270877e1a686e3bb70" exitCode=0 Dec 01 18:42:44 crc kubenswrapper[4935]: I1201 18:42:44.726075 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" event={"ID":"725a954a-2f06-4951-bfe5-c4db016352ca","Type":"ContainerDied","Data":"92219d6591fe01936b730784e0be54bb8bd8be74de4b85270877e1a686e3bb70"} Dec 01 18:42:44 crc kubenswrapper[4935]: I1201 18:42:44.726136 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" event={"ID":"725a954a-2f06-4951-bfe5-c4db016352ca","Type":"ContainerStarted","Data":"07169d1f6d5ff2bf604e85b3300123818024074118c86b8b02ea646c90d7fb34"} Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.362486 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4s97m"] Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.362957 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-controller" containerID="cri-o://043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2" gracePeriod=30 Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.363405 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-acl-logging" containerID="cri-o://bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8" gracePeriod=30 Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.363456 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="sbdb" containerID="cri-o://51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c" gracePeriod=30 Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.363498 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="nbdb" containerID="cri-o://ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd" gracePeriod=30 Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.363537 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="northd" containerID="cri-o://d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d" gracePeriod=30 Dec 01 18:42:46 crc 
kubenswrapper[4935]: I1201 18:42:46.363632 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kube-rbac-proxy-node" containerID="cri-o://f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7" gracePeriod=30 Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.363788 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222" gracePeriod=30 Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.416208 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" containerID="cri-o://d33f5de50fb51090f5f7b3456f4a107e432af52c9e3fc141f21ee67dee73d018" gracePeriod=30 Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.740432 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/3.log" Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.741009 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/1.log" Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.743608 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/0.log" Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.744635 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8" exitCode=143 Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.744680 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8"} Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.744718 4935 scope.go:117] "RemoveContainer" containerID="b20a620ff9aecf2520cee1ff52eef73341e54d7ecb055482227443dfeb98a05a" Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.785200 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rmmnw"] Dec 01 18:42:46 crc kubenswrapper[4935]: I1201 18:42:46.785495 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rmmnw" podUID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerName="registry-server" containerID="cri-o://5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997" gracePeriod=2 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.015970 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.156896 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-catalog-content\") pod \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.156998 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvkfj\" (UniqueName: \"kubernetes.io/projected/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-kube-api-access-pvkfj\") pod \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.157053 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-utilities\") pod \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\" (UID: \"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1\") " Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.160452 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-utilities" (OuterVolumeSpecName: "utilities") pod "052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" (UID: "052dd4e1-eb43-4759-bbdb-97bf0e11c8e1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.164602 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-kube-api-access-pvkfj" (OuterVolumeSpecName: "kube-api-access-pvkfj") pod "052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" (UID: "052dd4e1-eb43-4759-bbdb-97bf0e11c8e1"). InnerVolumeSpecName "kube-api-access-pvkfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.227114 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" (UID: "052dd4e1-eb43-4759-bbdb-97bf0e11c8e1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.259269 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.259311 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvkfj\" (UniqueName: \"kubernetes.io/projected/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-kube-api-access-pvkfj\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.259326 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.753973 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovnkube-controller/3.log" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.754418 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/1.log" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.756886 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-controller/0.log" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758118 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="d33f5de50fb51090f5f7b3456f4a107e432af52c9e3fc141f21ee67dee73d018" exitCode=0 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758214 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c" exitCode=0 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758227 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd" exitCode=0 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758213 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"d33f5de50fb51090f5f7b3456f4a107e432af52c9e3fc141f21ee67dee73d018"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758285 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758304 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758322 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" 
event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758240 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d" exitCode=0 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758343 4935 scope.go:117] "RemoveContainer" containerID="b4c9eda179fdbb586d2a7b300cd1e376bfe4104808504d81618991cc5311f4f1" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758364 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222" exitCode=0 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758385 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7" exitCode=0 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758394 4935 generic.go:334] "Generic (PLEG): container finished" podID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerID="043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2" exitCode=143 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758442 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758490 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.758508 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.761368 4935 generic.go:334] "Generic (PLEG): container finished" podID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerID="5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997" exitCode=0 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.761430 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmmnw" event={"ID":"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1","Type":"ContainerDied","Data":"5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.761738 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rmmnw" event={"ID":"052dd4e1-eb43-4759-bbdb-97bf0e11c8e1","Type":"ContainerDied","Data":"a6be85e2af3c63d4d5928a159e0315c0e8042c70af726bc0f042afaf9ce360a7"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.761444 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rmmnw" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.763599 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzx4x_3f7b45c6-7cf7-420d-afb3-ea00b791af58/kube-multus/2.log" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.764187 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzx4x_3f7b45c6-7cf7-420d-afb3-ea00b791af58/kube-multus/1.log" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.764227 4935 generic.go:334] "Generic (PLEG): container finished" podID="3f7b45c6-7cf7-420d-afb3-ea00b791af58" containerID="10d7e9da09acf4f48bb842abb0be17f1c105a5670e59294570ce7d7f84b9ed82" exitCode=2 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.764284 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzx4x" event={"ID":"3f7b45c6-7cf7-420d-afb3-ea00b791af58","Type":"ContainerDied","Data":"10d7e9da09acf4f48bb842abb0be17f1c105a5670e59294570ce7d7f84b9ed82"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.764822 4935 scope.go:117] "RemoveContainer" containerID="10d7e9da09acf4f48bb842abb0be17f1c105a5670e59294570ce7d7f84b9ed82" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.768132 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" event={"ID":"725a954a-2f06-4951-bfe5-c4db016352ca","Type":"ContainerDied","Data":"760f7095b373535e96ed5346bac9922ba087741f53f27a349adac8c071cb07e7"} Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.768077 4935 generic.go:334] "Generic (PLEG): container finished" podID="725a954a-2f06-4951-bfe5-c4db016352ca" containerID="760f7095b373535e96ed5346bac9922ba087741f53f27a349adac8c071cb07e7" exitCode=0 Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.798066 4935 scope.go:117] "RemoveContainer" containerID="5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.841080 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rmmnw"] Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.844357 4935 scope.go:117] "RemoveContainer" containerID="09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.846325 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rmmnw"] Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.911698 4935 scope.go:117] "RemoveContainer" containerID="4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.938515 4935 scope.go:117] "RemoveContainer" containerID="5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997" Dec 01 18:42:47 crc kubenswrapper[4935]: E1201 18:42:47.939380 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997\": container with ID starting with 5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997 not found: ID does not exist" containerID="5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.939445 4935 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997"} err="failed to get container status \"5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997\": rpc error: code = NotFound desc = could not find container \"5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997\": container with ID starting with 5b33e1933da3bf29034d420253f63855cc02b00cbcb44cd41071b84378c97997 not found: ID does not exist" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.939473 4935 scope.go:117] "RemoveContainer" containerID="09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a" Dec 01 18:42:47 crc kubenswrapper[4935]: E1201 18:42:47.939863 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a\": container with ID starting with 09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a not found: ID does not exist" containerID="09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.940116 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a"} err="failed to get container status \"09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a\": rpc error: code = NotFound desc = could not find container \"09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a\": container with ID starting with 09bd1e8e5b7d9c19345c8527ad55d227f38deca7d41c4ae4af2caf07275ab41a not found: ID does not exist" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.940155 4935 scope.go:117] "RemoveContainer" containerID="4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262" Dec 01 18:42:47 crc kubenswrapper[4935]: E1201 18:42:47.940379 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262\": container with ID starting with 4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262 not found: ID does not exist" containerID="4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.940403 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262"} err="failed to get container status \"4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262\": rpc error: code = NotFound desc = could not find container \"4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262\": container with ID starting with 4b04eb961cb8c0f9cff72543961e472d028b9118a0909b646f41df858f42e262 not found: ID does not exist" Dec 01 18:42:47 crc kubenswrapper[4935]: I1201 18:42:47.940415 4935 scope.go:117] "RemoveContainer" containerID="5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.076782 4935 scope.go:117] "RemoveContainer" containerID="5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.438063 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-acl-logging/1.log" Dec 01 18:42:48 crc 
kubenswrapper[4935]: I1201 18:42:48.441885 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4s97m_f839cb87-9d0b-44af-a9a9-8a6df524aa62/ovn-controller/0.log" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.442341 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.447025 4935 log.go:32] "RemoveContainer from runtime service failed" err="rpc error: code = Unknown desc = failed to delete container k8s_kube-multus_multus-jzx4x_openshift-multus_3f7b45c6-7cf7-420d-afb3-ea00b791af58_1 in pod sandbox c033906f5955f21ab3800db25528396f89e52f6e1c93305f5b72aae7fe02d267 from index: no such id: '5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868'" containerID="5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.447062 4935 kuberuntime_gc.go:150] "Failed to remove container" err="rpc error: code = Unknown desc = failed to delete container k8s_kube-multus_multus-jzx4x_openshift-multus_3f7b45c6-7cf7-420d-afb3-ea00b791af58_1 in pod sandbox c033906f5955f21ab3800db25528396f89e52f6e1c93305f5b72aae7fe02d267 from index: no such id: '5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868'" containerID="5ba32f93bf5ab7da179cbeaf9f6b97bc2b9e7be58f57c2d815a690663ec16868" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.520546 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" path="/var/lib/kubelet/pods/052dd4e1-eb43-4759-bbdb-97bf0e11c8e1/volumes" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524137 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hjgvx"] Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524515 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kube-rbac-proxy-node" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524545 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kube-rbac-proxy-node" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524569 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="northd" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524582 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="northd" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524597 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerName="extract-utilities" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524607 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerName="extract-utilities" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524621 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524632 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524648 4935 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524673 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524689 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kube-rbac-proxy-ovn-metrics" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524702 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kube-rbac-proxy-ovn-metrics" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524721 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-acl-logging" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524742 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-acl-logging" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524757 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524768 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524782 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerName="extract-content" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524791 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerName="extract-content" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524806 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kubecfg-setup" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524816 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kubecfg-setup" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524831 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524841 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524856 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524869 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524881 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerName="registry-server" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524891 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerName="registry-server" Dec 01 18:42:48 crc 
kubenswrapper[4935]: E1201 18:42:48.524909 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="sbdb" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524919 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="sbdb" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.524938 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="nbdb" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.524948 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="nbdb" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525122 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-acl-logging" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525254 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kube-rbac-proxy-ovn-metrics" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525271 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525283 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525302 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525315 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525327 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="northd" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525344 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="sbdb" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525360 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525372 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="052dd4e1-eb43-4759-bbdb-97bf0e11c8e1" containerName="registry-server" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525387 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="kube-rbac-proxy-node" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525398 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="nbdb" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.525571 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525586 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" 
containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: E1201 18:42:48.525620 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-acl-logging" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525631 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-acl-logging" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525831 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovnkube-controller" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.525861 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" containerName="ovn-acl-logging" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.528928 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585623 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-ovn-kubernetes\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585681 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-var-lib-cni-networks-ovn-kubernetes\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585718 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-openvswitch\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585737 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-node-log\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585756 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-bin\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585782 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-ovn\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585799 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-netns\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 
01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585789 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585824 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-kubelet\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585847 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-systemd-units\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585861 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-slash\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585862 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585893 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-node-log" (OuterVolumeSpecName: "node-log") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585901 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-systemd\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585919 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585926 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-netd\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585946 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585962 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-config\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585973 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-slash" (OuterVolumeSpecName: "host-slash") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585997 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586015 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.585915 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586128 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586120 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586527 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586565 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-etc-openvswitch\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586615 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-log-socket\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586690 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-log-socket" (OuterVolumeSpecName: "log-socket") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586715 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovn-node-metrics-cert\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586739 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-var-lib-openvswitch\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586773 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586798 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-env-overrides\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586845 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-script-lib\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586870 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmptx\" (UniqueName: \"kubernetes.io/projected/f839cb87-9d0b-44af-a9a9-8a6df524aa62-kube-api-access-hmptx\") pod \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\" (UID: \"f839cb87-9d0b-44af-a9a9-8a6df524aa62\") " Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.586895 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587194 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587344 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587696 4935 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587746 4935 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587763 4935 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587775 4935 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-node-log\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587787 4935 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587800 4935 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587812 4935 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587825 4935 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587838 4935 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587849 4935 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-slash\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587862 4935 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587873 4935 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587884 4935 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-etc-openvswitch\") on node \"crc\" 
DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587895 4935 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-log-socket\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587906 4935 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587917 4935 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.587927 4935 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.597845 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f839cb87-9d0b-44af-a9a9-8a6df524aa62-kube-api-access-hmptx" (OuterVolumeSpecName: "kube-api-access-hmptx") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "kube-api-access-hmptx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.597821 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.601108 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "f839cb87-9d0b-44af-a9a9-8a6df524aa62" (UID: "f839cb87-9d0b-44af-a9a9-8a6df524aa62"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689324 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4676a5dc-40e7-415d-b4c3-243fa8485b24-env-overrides\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689398 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-run-systemd\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689422 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-run-openvswitch\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689439 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4676a5dc-40e7-415d-b4c3-243fa8485b24-ovn-node-metrics-cert\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689462 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-run-ovn-kubernetes\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689480 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-slash\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689654 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-run-netns\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689692 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-systemd-units\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689709 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-var-lib-openvswitch\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689784 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-log-socket\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689810 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-etc-openvswitch\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689864 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-kubelet\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689896 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-run-ovn\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689922 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4676a5dc-40e7-415d-b4c3-243fa8485b24-ovnkube-config\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689949 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4676a5dc-40e7-415d-b4c3-243fa8485b24-ovnkube-script-lib\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.689972 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-cni-netd\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.690130 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfffh\" (UniqueName: \"kubernetes.io/projected/4676a5dc-40e7-415d-b4c3-243fa8485b24-kube-api-access-gfffh\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.690279 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-cni-bin\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.690331 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.690384 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-node-log\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.690540 4935 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f839cb87-9d0b-44af-a9a9-8a6df524aa62-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.690571 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmptx\" (UniqueName: \"kubernetes.io/projected/f839cb87-9d0b-44af-a9a9-8a6df524aa62-kube-api-access-hmptx\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.690594 4935 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f839cb87-9d0b-44af-a9a9-8a6df524aa62-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.777003 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzx4x_3f7b45c6-7cf7-420d-afb3-ea00b791af58/kube-multus/2.log" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.777107 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzx4x" event={"ID":"3f7b45c6-7cf7-420d-afb3-ea00b791af58","Type":"ContainerStarted","Data":"c60c2d39a19c8f59a7e6f7fa03717f39b1793ce750eaf17b64b4d9e7fd4465b5"} Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.780128 4935 generic.go:334] "Generic (PLEG): container finished" podID="725a954a-2f06-4951-bfe5-c4db016352ca" containerID="39c857c2e524ced9552a3fbfc63369b098de6d1a1b959f30272795e8cd7157af" exitCode=0 Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.780182 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" event={"ID":"725a954a-2f06-4951-bfe5-c4db016352ca","Type":"ContainerDied","Data":"39c857c2e524ced9552a3fbfc63369b098de6d1a1b959f30272795e8cd7157af"} Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.785302 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" event={"ID":"f839cb87-9d0b-44af-a9a9-8a6df524aa62","Type":"ContainerDied","Data":"a113d13f0ef0e364cd3cdb1953878b38d85e136a708e3f9f696eefd4de936976"} Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.785381 4935 scope.go:117] "RemoveContainer" 
containerID="d33f5de50fb51090f5f7b3456f4a107e432af52c9e3fc141f21ee67dee73d018" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.785409 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4s97m" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792186 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-run-ovn-kubernetes\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792238 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-slash\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792281 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-run-netns\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792307 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-systemd-units\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792328 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-var-lib-openvswitch\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792340 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-run-ovn-kubernetes\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792363 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-log-socket\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792391 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-etc-openvswitch\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792399 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-run-netns\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792432 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-kubelet\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792456 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-run-ovn\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792480 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-systemd-units\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792482 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4676a5dc-40e7-415d-b4c3-243fa8485b24-ovnkube-config\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792524 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4676a5dc-40e7-415d-b4c3-243fa8485b24-ovnkube-script-lib\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792580 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-cni-netd\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792631 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfffh\" (UniqueName: \"kubernetes.io/projected/4676a5dc-40e7-415d-b4c3-243fa8485b24-kube-api-access-gfffh\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792675 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-cni-bin\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792703 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792738 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-node-log\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792773 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4676a5dc-40e7-415d-b4c3-243fa8485b24-env-overrides\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792831 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-run-systemd\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792853 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-run-openvswitch\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792870 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4676a5dc-40e7-415d-b4c3-243fa8485b24-ovn-node-metrics-cert\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793346 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4676a5dc-40e7-415d-b4c3-243fa8485b24-ovnkube-config\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792433 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-slash\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793411 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-log-socket\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793451 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-run-ovn\") pod \"ovnkube-node-hjgvx\" (UID: 
\"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793453 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-etc-openvswitch\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.792459 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-var-lib-openvswitch\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793472 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-kubelet\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793503 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793528 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-run-systemd\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793557 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-cni-netd\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793600 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-host-cni-bin\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793569 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-node-log\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.793568 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4676a5dc-40e7-415d-b4c3-243fa8485b24-run-openvswitch\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc 
kubenswrapper[4935]: I1201 18:42:48.794047 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4676a5dc-40e7-415d-b4c3-243fa8485b24-env-overrides\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.794279 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4676a5dc-40e7-415d-b4c3-243fa8485b24-ovnkube-script-lib\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.796269 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w7n72"] Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.796853 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4676a5dc-40e7-415d-b4c3-243fa8485b24-ovn-node-metrics-cert\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.797587 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.811396 4935 scope.go:117] "RemoveContainer" containerID="bd09ed425f051792226a5b988e0b93bca352235ac9eb86febec6c47dc3447ab8" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.821460 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfffh\" (UniqueName: \"kubernetes.io/projected/4676a5dc-40e7-415d-b4c3-243fa8485b24-kube-api-access-gfffh\") pod \"ovnkube-node-hjgvx\" (UID: \"4676a5dc-40e7-415d-b4c3-243fa8485b24\") " pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.837737 4935 scope.go:117] "RemoveContainer" containerID="51aea42fadda98e26f6cd268d84de206ed5485a78dbf373e8184f1e0587e8f1c" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.842620 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.883200 4935 scope.go:117] "RemoveContainer" containerID="ca223f34e4e9185a7fb9a44e382b1921caa6e9ed0ef498e1abd54fa3519abbcd" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.908822 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4s97m"] Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.924754 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4s97m"] Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.980354 4935 scope.go:117] "RemoveContainer" containerID="d07d3eb513e54e2314d51f5b3a159d9bdcd085eb6e79eb5bf33374eb6a0d226d" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.996828 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sczr\" (UniqueName: \"kubernetes.io/projected/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-kube-api-access-6sczr\") pod \"redhat-operators-w7n72\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.996906 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-catalog-content\") pod \"redhat-operators-w7n72\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:48 crc kubenswrapper[4935]: I1201 18:42:48.996937 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-utilities\") pod \"redhat-operators-w7n72\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.026731 4935 scope.go:117] "RemoveContainer" containerID="ae177a64f463684c32ad2d6c789bbd6bfd9977824a0d25323a51294043b3c222" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.050685 4935 scope.go:117] "RemoveContainer" containerID="f45f21eb860037ff840d87661a00782ace0365b9dcfdc3ea036ff2d15aae89b7" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.065568 4935 scope.go:117] "RemoveContainer" containerID="043b099791eae1e9fcb86c092227158325f7a5ef9c14c36070ed8d2b3e23d6d2" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.090306 4935 scope.go:117] "RemoveContainer" containerID="b4b2bb676199b2187a6ef3477bd06f1c7cc86b454d80024491c29e3323487e6d" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.097942 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sczr\" (UniqueName: \"kubernetes.io/projected/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-kube-api-access-6sczr\") pod \"redhat-operators-w7n72\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.098022 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-catalog-content\") pod \"redhat-operators-w7n72\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: 
I1201 18:42:49.098097 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-utilities\") pod \"redhat-operators-w7n72\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.098768 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-catalog-content\") pod \"redhat-operators-w7n72\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.098772 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-utilities\") pod \"redhat-operators-w7n72\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.119525 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sczr\" (UniqueName: \"kubernetes.io/projected/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-kube-api-access-6sczr\") pod \"redhat-operators-w7n72\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.119863 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: E1201 18:42:49.141340 4935 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-w7n72_openshift-marketplace_4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c_0(aaa59c97e14764a7fec70a802b5386c1dfe3176e27abe5b9cc168fbf939108b6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 18:42:49 crc kubenswrapper[4935]: E1201 18:42:49.141413 4935 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-w7n72_openshift-marketplace_4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c_0(aaa59c97e14764a7fec70a802b5386c1dfe3176e27abe5b9cc168fbf939108b6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: E1201 18:42:49.141441 4935 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-w7n72_openshift-marketplace_4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c_0(aaa59c97e14764a7fec70a802b5386c1dfe3176e27abe5b9cc168fbf939108b6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:49 crc kubenswrapper[4935]: E1201 18:42:49.141490 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"redhat-operators-w7n72_openshift-marketplace(4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"redhat-operators-w7n72_openshift-marketplace(4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-w7n72_openshift-marketplace_4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c_0(aaa59c97e14764a7fec70a802b5386c1dfe3176e27abe5b9cc168fbf939108b6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/redhat-operators-w7n72" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.792552 4935 generic.go:334] "Generic (PLEG): container finished" podID="4676a5dc-40e7-415d-b4c3-243fa8485b24" containerID="8d3d20dc187c464f26cb5ca044a81483a67f59da1920336f6072edb2c3fc1475" exitCode=0 Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.792634 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerDied","Data":"8d3d20dc187c464f26cb5ca044a81483a67f59da1920336f6072edb2c3fc1475"} Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.792681 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerStarted","Data":"621a918808683bce0435b8e8d68b385f884986ace0e2860be688c52800320a0c"} Dec 01 18:42:49 crc kubenswrapper[4935]: I1201 18:42:49.876274 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.010784 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfrmt\" (UniqueName: \"kubernetes.io/projected/725a954a-2f06-4951-bfe5-c4db016352ca-kube-api-access-nfrmt\") pod \"725a954a-2f06-4951-bfe5-c4db016352ca\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.010874 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-util\") pod \"725a954a-2f06-4951-bfe5-c4db016352ca\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.011005 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-bundle\") pod \"725a954a-2f06-4951-bfe5-c4db016352ca\" (UID: \"725a954a-2f06-4951-bfe5-c4db016352ca\") " Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.013922 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-bundle" (OuterVolumeSpecName: "bundle") pod "725a954a-2f06-4951-bfe5-c4db016352ca" (UID: "725a954a-2f06-4951-bfe5-c4db016352ca"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.031417 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/725a954a-2f06-4951-bfe5-c4db016352ca-kube-api-access-nfrmt" (OuterVolumeSpecName: "kube-api-access-nfrmt") pod "725a954a-2f06-4951-bfe5-c4db016352ca" (UID: "725a954a-2f06-4951-bfe5-c4db016352ca"). InnerVolumeSpecName "kube-api-access-nfrmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.111929 4935 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.111960 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfrmt\" (UniqueName: \"kubernetes.io/projected/725a954a-2f06-4951-bfe5-c4db016352ca-kube-api-access-nfrmt\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.153020 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-util" (OuterVolumeSpecName: "util") pod "725a954a-2f06-4951-bfe5-c4db016352ca" (UID: "725a954a-2f06-4951-bfe5-c4db016352ca"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.213462 4935 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/725a954a-2f06-4951-bfe5-c4db016352ca-util\") on node \"crc\" DevicePath \"\"" Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.534758 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f839cb87-9d0b-44af-a9a9-8a6df524aa62" path="/var/lib/kubelet/pods/f839cb87-9d0b-44af-a9a9-8a6df524aa62/volumes" Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.801116 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerStarted","Data":"e5d5af4aeb8698695ef0bda8ea2cf72659d7a57448516fd85d3756e198f60ddc"} Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.801542 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerStarted","Data":"9161f373bd05523c6f8717ad8355e802bab1a24fc05947449833688e3454f2af"} Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.801562 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerStarted","Data":"579075284a70916ca62167d70f604b3e9c0800296fa62a0b09783dfa419432ce"} Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.801575 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerStarted","Data":"037d1718025c94a66215057b578a87e48c103c542bc203b938307d613d38d62e"} Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.801587 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" 
event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerStarted","Data":"79ccc88e4d1587bb8b81acac7211798578d30c2d96b88261cb4318871e18b27d"} Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.803765 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" event={"ID":"725a954a-2f06-4951-bfe5-c4db016352ca","Type":"ContainerDied","Data":"07169d1f6d5ff2bf604e85b3300123818024074118c86b8b02ea646c90d7fb34"} Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.803796 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07169d1f6d5ff2bf604e85b3300123818024074118c86b8b02ea646c90d7fb34" Dec 01 18:42:50 crc kubenswrapper[4935]: I1201 18:42:50.803828 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk" Dec 01 18:42:51 crc kubenswrapper[4935]: I1201 18:42:51.816013 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerStarted","Data":"8b43f889ce6264b015428e350a7079b80744ed5465cd37c72acfa5da10b4af3d"} Dec 01 18:42:53 crc kubenswrapper[4935]: I1201 18:42:53.833665 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerStarted","Data":"a0ee80506b9490819ab5dadca6677d6bc4f655eb6b75e79e056fb992873df46a"} Dec 01 18:42:54 crc kubenswrapper[4935]: I1201 18:42:54.346553 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:42:54 crc kubenswrapper[4935]: I1201 18:42:54.346614 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:42:54 crc kubenswrapper[4935]: I1201 18:42:54.346668 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:42:54 crc kubenswrapper[4935]: I1201 18:42:54.347360 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f03453b0bfa80f20c0452bdf75f21c17981d4d486ecd0aa51da07478cdd727f3"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 18:42:54 crc kubenswrapper[4935]: I1201 18:42:54.347428 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://f03453b0bfa80f20c0452bdf75f21c17981d4d486ecd0aa51da07478cdd727f3" gracePeriod=600 Dec 01 18:42:55 crc kubenswrapper[4935]: I1201 18:42:55.878474 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" 
containerID="f03453b0bfa80f20c0452bdf75f21c17981d4d486ecd0aa51da07478cdd727f3" exitCode=0 Dec 01 18:42:55 crc kubenswrapper[4935]: I1201 18:42:55.878533 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"f03453b0bfa80f20c0452bdf75f21c17981d4d486ecd0aa51da07478cdd727f3"} Dec 01 18:42:55 crc kubenswrapper[4935]: I1201 18:42:55.879198 4935 scope.go:117] "RemoveContainer" containerID="b347e6f589fbccdde8049b52c2b8f25c113125fe0e4295d71410044e8cbbc0ae" Dec 01 18:42:56 crc kubenswrapper[4935]: I1201 18:42:56.888039 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" event={"ID":"4676a5dc-40e7-415d-b4c3-243fa8485b24","Type":"ContainerStarted","Data":"4787699bc6fa09676d3dadc8b2b50516e35e46589c136d8c9260e6585b4bfd6d"} Dec 01 18:42:56 crc kubenswrapper[4935]: I1201 18:42:56.888495 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:56 crc kubenswrapper[4935]: I1201 18:42:56.888516 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:56 crc kubenswrapper[4935]: I1201 18:42:56.935191 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" podStartSLOduration=8.93514092 podStartE2EDuration="8.93514092s" podCreationTimestamp="2025-12-01 18:42:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:42:56.932077443 +0000 UTC m=+790.953706722" watchObservedRunningTime="2025-12-01 18:42:56.93514092 +0000 UTC m=+790.956770179" Dec 01 18:42:56 crc kubenswrapper[4935]: I1201 18:42:56.949385 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:57 crc kubenswrapper[4935]: I1201 18:42:57.408743 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w7n72"] Dec 01 18:42:57 crc kubenswrapper[4935]: I1201 18:42:57.409391 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:57 crc kubenswrapper[4935]: I1201 18:42:57.409928 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:57 crc kubenswrapper[4935]: E1201 18:42:57.441082 4935 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-w7n72_openshift-marketplace_4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c_0(a571c6e8726f27388f2cda12f642cf7529dd9de5ef1e538351cd57d98b1d8c70): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 18:42:57 crc kubenswrapper[4935]: E1201 18:42:57.441195 4935 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-w7n72_openshift-marketplace_4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c_0(a571c6e8726f27388f2cda12f642cf7529dd9de5ef1e538351cd57d98b1d8c70): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:57 crc kubenswrapper[4935]: E1201 18:42:57.441222 4935 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-w7n72_openshift-marketplace_4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c_0(a571c6e8726f27388f2cda12f642cf7529dd9de5ef1e538351cd57d98b1d8c70): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:42:57 crc kubenswrapper[4935]: E1201 18:42:57.441277 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"redhat-operators-w7n72_openshift-marketplace(4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"redhat-operators-w7n72_openshift-marketplace(4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-w7n72_openshift-marketplace_4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c_0(a571c6e8726f27388f2cda12f642cf7529dd9de5ef1e538351cd57d98b1d8c70): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/redhat-operators-w7n72" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" Dec 01 18:42:57 crc kubenswrapper[4935]: I1201 18:42:57.899685 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"744bd448e7cc386bf9953720a69481d4b4d71c4c1477d84184ae1c1693198763"} Dec 01 18:42:57 crc kubenswrapper[4935]: I1201 18:42:57.900128 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:57 crc kubenswrapper[4935]: I1201 18:42:57.942218 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.706058 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p"] Dec 01 18:42:59 crc kubenswrapper[4935]: E1201 18:42:59.706637 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725a954a-2f06-4951-bfe5-c4db016352ca" containerName="extract" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.706649 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="725a954a-2f06-4951-bfe5-c4db016352ca" containerName="extract" Dec 01 18:42:59 crc kubenswrapper[4935]: E1201 18:42:59.706666 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725a954a-2f06-4951-bfe5-c4db016352ca" containerName="util" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.706673 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="725a954a-2f06-4951-bfe5-c4db016352ca" containerName="util" Dec 01 18:42:59 crc kubenswrapper[4935]: E1201 18:42:59.706688 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725a954a-2f06-4951-bfe5-c4db016352ca" containerName="pull" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.706696 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="725a954a-2f06-4951-bfe5-c4db016352ca" containerName="pull" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.706804 4935 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="725a954a-2f06-4951-bfe5-c4db016352ca" containerName="extract" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.707213 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.709028 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-nc7jg" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.709246 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.709749 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.726667 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p"] Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.812014 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8"] Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.812781 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.819111 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.819398 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-dxsd4" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.833530 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c"] Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.834250 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.847896 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c"] Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.859853 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n7rk\" (UniqueName: \"kubernetes.io/projected/c4d2ca33-292e-45b0-b5b0-972516b76b0d-kube-api-access-9n7rk\") pod \"obo-prometheus-operator-668cf9dfbb-4fc9p\" (UID: \"c4d2ca33-292e-45b0-b5b0-972516b76b0d\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.896054 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8"] Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.961183 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/133161d9-93f5-4437-bb96-28c2726db1ed-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c\" (UID: \"133161d9-93f5-4437-bb96-28c2726db1ed\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.961235 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n7rk\" (UniqueName: \"kubernetes.io/projected/c4d2ca33-292e-45b0-b5b0-972516b76b0d-kube-api-access-9n7rk\") pod \"obo-prometheus-operator-668cf9dfbb-4fc9p\" (UID: \"c4d2ca33-292e-45b0-b5b0-972516b76b0d\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.961272 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0fa7af9d-21c4-4521-a988-05c5043e7e51-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8\" (UID: \"0fa7af9d-21c4-4521-a988-05c5043e7e51\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.961322 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0fa7af9d-21c4-4521-a988-05c5043e7e51-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8\" (UID: \"0fa7af9d-21c4-4521-a988-05c5043e7e51\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.961362 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/133161d9-93f5-4437-bb96-28c2726db1ed-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c\" (UID: \"133161d9-93f5-4437-bb96-28c2726db1ed\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" Dec 01 18:42:59 crc kubenswrapper[4935]: I1201 18:42:59.987286 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n7rk\" (UniqueName: 
\"kubernetes.io/projected/c4d2ca33-292e-45b0-b5b0-972516b76b0d-kube-api-access-9n7rk\") pod \"obo-prometheus-operator-668cf9dfbb-4fc9p\" (UID: \"c4d2ca33-292e-45b0-b5b0-972516b76b0d\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.003149 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-mhznx"] Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.003897 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:00 crc kubenswrapper[4935]: W1201 18:43:00.006373 4935 reflector.go:561] object-"openshift-operators"/"observability-operator-sa-dockercfg-dh57n": failed to list *v1.Secret: secrets "observability-operator-sa-dockercfg-dh57n" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object Dec 01 18:43:00 crc kubenswrapper[4935]: E1201 18:43:00.006424 4935 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"observability-operator-sa-dockercfg-dh57n\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"observability-operator-sa-dockercfg-dh57n\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 01 18:43:00 crc kubenswrapper[4935]: W1201 18:43:00.006518 4935 reflector.go:561] object-"openshift-operators"/"observability-operator-tls": failed to list *v1.Secret: secrets "observability-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-operators": no relationship found between node 'crc' and this object Dec 01 18:43:00 crc kubenswrapper[4935]: E1201 18:43:00.006534 4935 reflector.go:158] "Unhandled Error" err="object-\"openshift-operators\"/\"observability-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"observability-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.020903 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-mhznx"] Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.025423 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.062655 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/133161d9-93f5-4437-bb96-28c2726db1ed-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c\" (UID: \"133161d9-93f5-4437-bb96-28c2726db1ed\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.062753 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0fa7af9d-21c4-4521-a988-05c5043e7e51-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8\" (UID: \"0fa7af9d-21c4-4521-a988-05c5043e7e51\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.062776 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0fa7af9d-21c4-4521-a988-05c5043e7e51-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8\" (UID: \"0fa7af9d-21c4-4521-a988-05c5043e7e51\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.062819 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/133161d9-93f5-4437-bb96-28c2726db1ed-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c\" (UID: \"133161d9-93f5-4437-bb96-28c2726db1ed\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.066771 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/133161d9-93f5-4437-bb96-28c2726db1ed-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c\" (UID: \"133161d9-93f5-4437-bb96-28c2726db1ed\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.073707 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0fa7af9d-21c4-4521-a988-05c5043e7e51-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8\" (UID: \"0fa7af9d-21c4-4521-a988-05c5043e7e51\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.073769 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/133161d9-93f5-4437-bb96-28c2726db1ed-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c\" (UID: \"133161d9-93f5-4437-bb96-28c2726db1ed\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.074424 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0fa7af9d-21c4-4521-a988-05c5043e7e51-webhook-cert\") pod 
\"obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8\" (UID: \"0fa7af9d-21c4-4521-a988-05c5043e7e51\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.126465 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.152805 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.164101 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4tpd\" (UniqueName: \"kubernetes.io/projected/f228055b-0716-4766-a2b7-dabacf9de9ad-kube-api-access-m4tpd\") pod \"observability-operator-d8bb48f5d-mhznx\" (UID: \"f228055b-0716-4766-a2b7-dabacf9de9ad\") " pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.164276 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f228055b-0716-4766-a2b7-dabacf9de9ad-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-mhznx\" (UID: \"f228055b-0716-4766-a2b7-dabacf9de9ad\") " pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.221994 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-htdg4"] Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.222922 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.227739 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-5sp2j" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.248539 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-htdg4"] Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.266450 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4tpd\" (UniqueName: \"kubernetes.io/projected/f228055b-0716-4766-a2b7-dabacf9de9ad-kube-api-access-m4tpd\") pod \"observability-operator-d8bb48f5d-mhznx\" (UID: \"f228055b-0716-4766-a2b7-dabacf9de9ad\") " pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.266524 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f228055b-0716-4766-a2b7-dabacf9de9ad-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-mhznx\" (UID: \"f228055b-0716-4766-a2b7-dabacf9de9ad\") " pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.288816 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4tpd\" (UniqueName: \"kubernetes.io/projected/f228055b-0716-4766-a2b7-dabacf9de9ad-kube-api-access-m4tpd\") pod \"observability-operator-d8bb48f5d-mhznx\" (UID: \"f228055b-0716-4766-a2b7-dabacf9de9ad\") " pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.367794 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/c1d7978e-7d35-4b9a-97f8-981562161cde-openshift-service-ca\") pod \"perses-operator-5446b9c989-htdg4\" (UID: \"c1d7978e-7d35-4b9a-97f8-981562161cde\") " pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.367888 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6xvv\" (UniqueName: \"kubernetes.io/projected/c1d7978e-7d35-4b9a-97f8-981562161cde-kube-api-access-g6xvv\") pod \"perses-operator-5446b9c989-htdg4\" (UID: \"c1d7978e-7d35-4b9a-97f8-981562161cde\") " pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.462672 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8"] Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.469616 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6xvv\" (UniqueName: \"kubernetes.io/projected/c1d7978e-7d35-4b9a-97f8-981562161cde-kube-api-access-g6xvv\") pod \"perses-operator-5446b9c989-htdg4\" (UID: \"c1d7978e-7d35-4b9a-97f8-981562161cde\") " pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.469773 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/c1d7978e-7d35-4b9a-97f8-981562161cde-openshift-service-ca\") pod 
\"perses-operator-5446b9c989-htdg4\" (UID: \"c1d7978e-7d35-4b9a-97f8-981562161cde\") " pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.470797 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/c1d7978e-7d35-4b9a-97f8-981562161cde-openshift-service-ca\") pod \"perses-operator-5446b9c989-htdg4\" (UID: \"c1d7978e-7d35-4b9a-97f8-981562161cde\") " pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:00 crc kubenswrapper[4935]: W1201 18:43:00.475491 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0fa7af9d_21c4_4521_a988_05c5043e7e51.slice/crio-241a3507eef55f5090d7389f747c2f50cad9a494042a71a2a055bf6e4b720f14 WatchSource:0}: Error finding container 241a3507eef55f5090d7389f747c2f50cad9a494042a71a2a055bf6e4b720f14: Status 404 returned error can't find the container with id 241a3507eef55f5090d7389f747c2f50cad9a494042a71a2a055bf6e4b720f14 Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.489881 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6xvv\" (UniqueName: \"kubernetes.io/projected/c1d7978e-7d35-4b9a-97f8-981562161cde-kube-api-access-g6xvv\") pod \"perses-operator-5446b9c989-htdg4\" (UID: \"c1d7978e-7d35-4b9a-97f8-981562161cde\") " pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.521344 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p"] Dec 01 18:43:00 crc kubenswrapper[4935]: W1201 18:43:00.523681 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4d2ca33_292e_45b0_b5b0_972516b76b0d.slice/crio-65de911361f4dd2dc5bfa41d0f5c52c663cf25916618f71bd8313166de002c91 WatchSource:0}: Error finding container 65de911361f4dd2dc5bfa41d0f5c52c663cf25916618f71bd8313166de002c91: Status 404 returned error can't find the container with id 65de911361f4dd2dc5bfa41d0f5c52c663cf25916618f71bd8313166de002c91 Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.558812 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c"] Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.568965 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.810587 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-htdg4"] Dec 01 18:43:00 crc kubenswrapper[4935]: W1201 18:43:00.814484 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1d7978e_7d35_4b9a_97f8_981562161cde.slice/crio-178463b061bf5502c2c5383ed6e885464e4c460ea910fa42022bddd9cce4a64c WatchSource:0}: Error finding container 178463b061bf5502c2c5383ed6e885464e4c460ea910fa42022bddd9cce4a64c: Status 404 returned error can't find the container with id 178463b061bf5502c2c5383ed6e885464e4c460ea910fa42022bddd9cce4a64c Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.916576 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" event={"ID":"133161d9-93f5-4437-bb96-28c2726db1ed","Type":"ContainerStarted","Data":"1dfd5b161e893faa83e20bd057fd8459059b7823dda80664b28053ff7852a54b"} Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.917820 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" event={"ID":"0fa7af9d-21c4-4521-a988-05c5043e7e51","Type":"ContainerStarted","Data":"241a3507eef55f5090d7389f747c2f50cad9a494042a71a2a055bf6e4b720f14"} Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.918976 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" event={"ID":"c4d2ca33-292e-45b0-b5b0-972516b76b0d","Type":"ContainerStarted","Data":"65de911361f4dd2dc5bfa41d0f5c52c663cf25916618f71bd8313166de002c91"} Dec 01 18:43:00 crc kubenswrapper[4935]: I1201 18:43:00.919994 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-htdg4" event={"ID":"c1d7978e-7d35-4b9a-97f8-981562161cde","Type":"ContainerStarted","Data":"178463b061bf5502c2c5383ed6e885464e4c460ea910fa42022bddd9cce4a64c"} Dec 01 18:43:01 crc kubenswrapper[4935]: I1201 18:43:01.154772 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-dh57n" Dec 01 18:43:01 crc kubenswrapper[4935]: E1201 18:43:01.267557 4935 secret.go:188] Couldn't get secret openshift-operators/observability-operator-tls: failed to sync secret cache: timed out waiting for the condition Dec 01 18:43:01 crc kubenswrapper[4935]: E1201 18:43:01.267649 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f228055b-0716-4766-a2b7-dabacf9de9ad-observability-operator-tls podName:f228055b-0716-4766-a2b7-dabacf9de9ad nodeName:}" failed. No retries permitted until 2025-12-01 18:43:01.767627 +0000 UTC m=+795.789256259 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "observability-operator-tls" (UniqueName: "kubernetes.io/secret/f228055b-0716-4766-a2b7-dabacf9de9ad-observability-operator-tls") pod "observability-operator-d8bb48f5d-mhznx" (UID: "f228055b-0716-4766-a2b7-dabacf9de9ad") : failed to sync secret cache: timed out waiting for the condition Dec 01 18:43:01 crc kubenswrapper[4935]: I1201 18:43:01.461137 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 01 18:43:01 crc kubenswrapper[4935]: I1201 18:43:01.786464 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f228055b-0716-4766-a2b7-dabacf9de9ad-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-mhznx\" (UID: \"f228055b-0716-4766-a2b7-dabacf9de9ad\") " pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:01 crc kubenswrapper[4935]: I1201 18:43:01.795934 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f228055b-0716-4766-a2b7-dabacf9de9ad-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-mhznx\" (UID: \"f228055b-0716-4766-a2b7-dabacf9de9ad\") " pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:01 crc kubenswrapper[4935]: I1201 18:43:01.841112 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:02 crc kubenswrapper[4935]: I1201 18:43:02.303318 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-mhznx"] Dec 01 18:43:02 crc kubenswrapper[4935]: I1201 18:43:02.947953 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" event={"ID":"f228055b-0716-4766-a2b7-dabacf9de9ad","Type":"ContainerStarted","Data":"71d6f96f2a14db6286f41c67387b116addac01b2303729192c881de225c3268e"} Dec 01 18:43:08 crc kubenswrapper[4935]: I1201 18:43:08.512660 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:43:08 crc kubenswrapper[4935]: I1201 18:43:08.513193 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:43:15 crc kubenswrapper[4935]: E1201 18:43:15.906797 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385" Dec 01 18:43:15 crc kubenswrapper[4935]: E1201 18:43:15.907679 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g6xvv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5446b9c989-htdg4_openshift-operators(c1d7978e-7d35-4b9a-97f8-981562161cde): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:43:15 crc kubenswrapper[4935]: E1201 18:43:15.908971 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5446b9c989-htdg4" podUID="c1d7978e-7d35-4b9a-97f8-981562161cde" Dec 01 18:43:16 crc kubenswrapper[4935]: E1201 18:43:16.042168 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385\\\"\"" pod="openshift-operators/perses-operator-5446b9c989-htdg4" podUID="c1d7978e-7d35-4b9a-97f8-981562161cde" Dec 01 18:43:18 crc kubenswrapper[4935]: E1201 18:43:18.568077 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Dec 01 18:43:18 crc kubenswrapper[4935]: E1201 18:43:18.568716 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) 
--openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91
596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m4tpd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-d8bb48f5d-mhznx_openshift-operators(f228055b-0716-4766-a2b7-dabacf9de9ad): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:43:18 crc kubenswrapper[4935]: E1201 18:43:18.570481 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" podUID="f228055b-0716-4766-a2b7-dabacf9de9ad" Dec 01 18:43:18 crc kubenswrapper[4935]: I1201 18:43:18.870487 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hjgvx" Dec 01 18:43:19 crc kubenswrapper[4935]: E1201 18:43:19.060935 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" podUID="f228055b-0716-4766-a2b7-dabacf9de9ad" Dec 01 18:43:19 crc kubenswrapper[4935]: E1201 18:43:19.397024 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3" Dec 01 18:43:19 crc kubenswrapper[4935]: E1201 18:43:19.397436 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9n7rk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-668cf9dfbb-4fc9p_openshift-operators(c4d2ca33-292e-45b0-b5b0-972516b76b0d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:43:19 crc kubenswrapper[4935]: E1201 18:43:19.398709 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" podUID="c4d2ca33-292e-45b0-b5b0-972516b76b0d" Dec 01 18:43:19 crc kubenswrapper[4935]: E1201 18:43:19.413915 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 01 
18:43:19 crc kubenswrapper[4935]: E1201 18:43:19.414092 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c_openshift-operators(133161d9-93f5-4437-bb96-28c2726db1ed): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:43:19 crc kubenswrapper[4935]: E1201 18:43:19.418518 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" podUID="133161d9-93f5-4437-bb96-28c2726db1ed" Dec 01 18:43:19 crc kubenswrapper[4935]: I1201 18:43:19.783459 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w7n72"] Dec 01 18:43:19 crc kubenswrapper[4935]: W1201 18:43:19.798138 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4767107e_f12c_4ce2_91cd_9aa7c3ae4d9c.slice/crio-c100ec73e05bccd93711d6f69605a22799d40dd1c5c03beb0523960f8653c377 WatchSource:0}: Error finding container c100ec73e05bccd93711d6f69605a22799d40dd1c5c03beb0523960f8653c377: Status 404 returned error can't find the container with id c100ec73e05bccd93711d6f69605a22799d40dd1c5c03beb0523960f8653c377 Dec 01 18:43:20 crc kubenswrapper[4935]: I1201 18:43:20.066501 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" event={"ID":"0fa7af9d-21c4-4521-a988-05c5043e7e51","Type":"ContainerStarted","Data":"dcfccb3c7e13696d1e6133e1c3f0cae33e7f4de52d376c32abb9c83deffc9ecc"} Dec 01 18:43:20 crc kubenswrapper[4935]: I1201 18:43:20.070235 4935 generic.go:334] "Generic (PLEG): container finished" podID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerID="9cf8526c1e9a7f0aec1b126bb28327676eaf5a2de5ba0d1c95af02986faa668e" exitCode=0 Dec 01 18:43:20 crc kubenswrapper[4935]: I1201 18:43:20.070357 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w7n72" event={"ID":"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c","Type":"ContainerDied","Data":"9cf8526c1e9a7f0aec1b126bb28327676eaf5a2de5ba0d1c95af02986faa668e"} Dec 01 18:43:20 crc kubenswrapper[4935]: I1201 18:43:20.070405 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w7n72" event={"ID":"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c","Type":"ContainerStarted","Data":"c100ec73e05bccd93711d6f69605a22799d40dd1c5c03beb0523960f8653c377"} Dec 01 18:43:20 crc kubenswrapper[4935]: E1201 18:43:20.080676 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" podUID="c4d2ca33-292e-45b0-b5b0-972516b76b0d" Dec 01 18:43:20 crc kubenswrapper[4935]: I1201 18:43:20.102095 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8" podStartSLOduration=2.190340404 podStartE2EDuration="21.102066125s" podCreationTimestamp="2025-12-01 18:42:59 +0000 UTC" firstStartedPulling="2025-12-01 18:43:00.479327917 +0000 UTC m=+794.500957176" lastFinishedPulling="2025-12-01 18:43:19.391053638 +0000 UTC m=+813.412682897" observedRunningTime="2025-12-01 18:43:20.096273981 +0000 UTC m=+814.117903250" watchObservedRunningTime="2025-12-01 18:43:20.102066125 +0000 UTC m=+814.123695384" Dec 01 18:43:21 crc kubenswrapper[4935]: I1201 18:43:21.082804 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" event={"ID":"133161d9-93f5-4437-bb96-28c2726db1ed","Type":"ContainerStarted","Data":"d32ae7acdc65dbb677173045947a7a8d900817edf9025f4df8572a36952e328c"} Dec 01 18:43:21 crc kubenswrapper[4935]: I1201 18:43:21.116174 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c" podStartSLOduration=-9223372014.738642 podStartE2EDuration="22.116133994s" podCreationTimestamp="2025-12-01 18:42:59 +0000 UTC" firstStartedPulling="2025-12-01 18:43:00.579982653 +0000 UTC m=+794.601611912" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:43:21.112441777 +0000 UTC m=+815.134071036" watchObservedRunningTime="2025-12-01 18:43:21.116133994 +0000 UTC m=+815.137763273" Dec 01 18:43:22 crc kubenswrapper[4935]: I1201 18:43:22.090878 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w7n72" 
event={"ID":"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c","Type":"ContainerStarted","Data":"a87384dab927b1d6f09b701c5768c07c77c26965fb3adc6890c63c3286dceefd"} Dec 01 18:43:23 crc kubenswrapper[4935]: I1201 18:43:23.097720 4935 generic.go:334] "Generic (PLEG): container finished" podID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerID="a87384dab927b1d6f09b701c5768c07c77c26965fb3adc6890c63c3286dceefd" exitCode=0 Dec 01 18:43:23 crc kubenswrapper[4935]: I1201 18:43:23.097771 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w7n72" event={"ID":"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c","Type":"ContainerDied","Data":"a87384dab927b1d6f09b701c5768c07c77c26965fb3adc6890c63c3286dceefd"} Dec 01 18:43:24 crc kubenswrapper[4935]: I1201 18:43:24.107970 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w7n72" event={"ID":"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c","Type":"ContainerStarted","Data":"3224509953f9796633c77e08d7a3ef804271906d8a2a74a9452b3f7e1be89795"} Dec 01 18:43:24 crc kubenswrapper[4935]: I1201 18:43:24.138268 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w7n72" podStartSLOduration=32.552843002 podStartE2EDuration="36.138240395s" podCreationTimestamp="2025-12-01 18:42:48 +0000 UTC" firstStartedPulling="2025-12-01 18:43:20.081008597 +0000 UTC m=+814.102637856" lastFinishedPulling="2025-12-01 18:43:23.66640597 +0000 UTC m=+817.688035249" observedRunningTime="2025-12-01 18:43:24.127182755 +0000 UTC m=+818.148812024" watchObservedRunningTime="2025-12-01 18:43:24.138240395 +0000 UTC m=+818.159869674" Dec 01 18:43:29 crc kubenswrapper[4935]: I1201 18:43:29.120178 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:43:29 crc kubenswrapper[4935]: I1201 18:43:29.120853 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:43:29 crc kubenswrapper[4935]: I1201 18:43:29.199341 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:43:29 crc kubenswrapper[4935]: I1201 18:43:29.262565 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:43:29 crc kubenswrapper[4935]: I1201 18:43:29.439088 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w7n72"] Dec 01 18:43:31 crc kubenswrapper[4935]: I1201 18:43:31.163379 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-w7n72" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerName="registry-server" containerID="cri-o://3224509953f9796633c77e08d7a3ef804271906d8a2a74a9452b3f7e1be89795" gracePeriod=2 Dec 01 18:43:35 crc kubenswrapper[4935]: I1201 18:43:35.190514 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-htdg4" event={"ID":"c1d7978e-7d35-4b9a-97f8-981562161cde","Type":"ContainerStarted","Data":"4baa9fd00b3f4fac4c82178cadd3346865915f7f868aea77517a175e9c85dab4"} Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.198664 4935 generic.go:334] "Generic (PLEG): container finished" podID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerID="3224509953f9796633c77e08d7a3ef804271906d8a2a74a9452b3f7e1be89795" exitCode=0 Dec 01 
18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.198818 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w7n72" event={"ID":"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c","Type":"ContainerDied","Data":"3224509953f9796633c77e08d7a3ef804271906d8a2a74a9452b3f7e1be89795"} Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.199268 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.223226 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-htdg4" podStartSLOduration=3.642531535 podStartE2EDuration="36.223203442s" podCreationTimestamp="2025-12-01 18:43:00 +0000 UTC" firstStartedPulling="2025-12-01 18:43:00.817091949 +0000 UTC m=+794.838721208" lastFinishedPulling="2025-12-01 18:43:33.397763856 +0000 UTC m=+827.419393115" observedRunningTime="2025-12-01 18:43:36.217946895 +0000 UTC m=+830.239576174" watchObservedRunningTime="2025-12-01 18:43:36.223203442 +0000 UTC m=+830.244832701" Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.448458 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.594807 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-utilities\") pod \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.595110 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-catalog-content\") pod \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.595226 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sczr\" (UniqueName: \"kubernetes.io/projected/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-kube-api-access-6sczr\") pod \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\" (UID: \"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c\") " Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.595945 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-utilities" (OuterVolumeSpecName: "utilities") pod "4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" (UID: "4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.601438 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-kube-api-access-6sczr" (OuterVolumeSpecName: "kube-api-access-6sczr") pod "4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" (UID: "4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c"). InnerVolumeSpecName "kube-api-access-6sczr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.697482 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sczr\" (UniqueName: \"kubernetes.io/projected/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-kube-api-access-6sczr\") on node \"crc\" DevicePath \"\"" Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.697545 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.702196 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" (UID: "4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:43:36 crc kubenswrapper[4935]: I1201 18:43:36.798286 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.214917 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w7n72" event={"ID":"4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c","Type":"ContainerDied","Data":"c100ec73e05bccd93711d6f69605a22799d40dd1c5c03beb0523960f8653c377"} Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.214939 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w7n72" Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.214969 4935 scope.go:117] "RemoveContainer" containerID="3224509953f9796633c77e08d7a3ef804271906d8a2a74a9452b3f7e1be89795" Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.217966 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" event={"ID":"f228055b-0716-4766-a2b7-dabacf9de9ad","Type":"ContainerStarted","Data":"d586c3fe8272e449cd8e3eefcbf11e5c2f2231138a17cc77c86bf1e2e600b00f"} Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.218354 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.247729 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" podStartSLOduration=4.251126056 podStartE2EDuration="38.247711221s" podCreationTimestamp="2025-12-01 18:42:59 +0000 UTC" firstStartedPulling="2025-12-01 18:43:02.297677806 +0000 UTC m=+796.319307065" lastFinishedPulling="2025-12-01 18:43:36.294262971 +0000 UTC m=+830.315892230" observedRunningTime="2025-12-01 18:43:37.245531662 +0000 UTC m=+831.267160931" watchObservedRunningTime="2025-12-01 18:43:37.247711221 +0000 UTC m=+831.269340480" Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.248241 4935 scope.go:117] "RemoveContainer" containerID="a87384dab927b1d6f09b701c5768c07c77c26965fb3adc6890c63c3286dceefd" Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.264886 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w7n72"] Dec 01 18:43:37 crc 
kubenswrapper[4935]: I1201 18:43:37.276344 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-w7n72"] Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.286506 4935 scope.go:117] "RemoveContainer" containerID="9cf8526c1e9a7f0aec1b126bb28327676eaf5a2de5ba0d1c95af02986faa668e" Dec 01 18:43:37 crc kubenswrapper[4935]: I1201 18:43:37.300400 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-mhznx" Dec 01 18:43:38 crc kubenswrapper[4935]: I1201 18:43:38.519483 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" path="/var/lib/kubelet/pods/4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c/volumes" Dec 01 18:43:40 crc kubenswrapper[4935]: I1201 18:43:40.572582 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-htdg4" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.758487 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-s7jbr"] Dec 01 18:43:43 crc kubenswrapper[4935]: E1201 18:43:43.759252 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerName="registry-server" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.759273 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerName="registry-server" Dec 01 18:43:43 crc kubenswrapper[4935]: E1201 18:43:43.759298 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerName="extract-utilities" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.759335 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerName="extract-utilities" Dec 01 18:43:43 crc kubenswrapper[4935]: E1201 18:43:43.759365 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerName="extract-content" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.759378 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerName="extract-content" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.759561 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="4767107e-f12c-4ce2-91cd-9aa7c3ae4d9c" containerName="registry-server" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.760796 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-s7jbr" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.763489 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.763711 4935 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-49w9d" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.763888 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.773305 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-s7jbr"] Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.805563 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7hwdt"] Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.806339 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-7hwdt" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.811319 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-k9tw2"] Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.812109 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.813562 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gvmz\" (UniqueName: \"kubernetes.io/projected/4fba61c9-f590-402c-a1b1-2a861a4b1bb6-kube-api-access-7gvmz\") pod \"cert-manager-cainjector-7f985d654d-s7jbr\" (UID: \"4fba61c9-f590-402c-a1b1-2a861a4b1bb6\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-s7jbr" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.814897 4935 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-2jnjt" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.818054 4935 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lfjfz" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.832906 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-k9tw2"] Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.840449 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7hwdt"] Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.915893 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvdk6\" (UniqueName: \"kubernetes.io/projected/1ee1742c-1268-40ba-a472-ca0184dd5fae-kube-api-access-nvdk6\") pod \"cert-manager-webhook-5655c58dd6-k9tw2\" (UID: \"1ee1742c-1268-40ba-a472-ca0184dd5fae\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.915948 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr82g\" (UniqueName: \"kubernetes.io/projected/54039121-127f-4ce7-b6d9-3dff080bc6ae-kube-api-access-zr82g\") pod \"cert-manager-5b446d88c5-7hwdt\" (UID: \"54039121-127f-4ce7-b6d9-3dff080bc6ae\") " pod="cert-manager/cert-manager-5b446d88c5-7hwdt" Dec 01 18:43:43 
crc kubenswrapper[4935]: I1201 18:43:43.915993 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gvmz\" (UniqueName: \"kubernetes.io/projected/4fba61c9-f590-402c-a1b1-2a861a4b1bb6-kube-api-access-7gvmz\") pod \"cert-manager-cainjector-7f985d654d-s7jbr\" (UID: \"4fba61c9-f590-402c-a1b1-2a861a4b1bb6\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-s7jbr" Dec 01 18:43:43 crc kubenswrapper[4935]: I1201 18:43:43.950218 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gvmz\" (UniqueName: \"kubernetes.io/projected/4fba61c9-f590-402c-a1b1-2a861a4b1bb6-kube-api-access-7gvmz\") pod \"cert-manager-cainjector-7f985d654d-s7jbr\" (UID: \"4fba61c9-f590-402c-a1b1-2a861a4b1bb6\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-s7jbr" Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.017410 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvdk6\" (UniqueName: \"kubernetes.io/projected/1ee1742c-1268-40ba-a472-ca0184dd5fae-kube-api-access-nvdk6\") pod \"cert-manager-webhook-5655c58dd6-k9tw2\" (UID: \"1ee1742c-1268-40ba-a472-ca0184dd5fae\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.017465 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr82g\" (UniqueName: \"kubernetes.io/projected/54039121-127f-4ce7-b6d9-3dff080bc6ae-kube-api-access-zr82g\") pod \"cert-manager-5b446d88c5-7hwdt\" (UID: \"54039121-127f-4ce7-b6d9-3dff080bc6ae\") " pod="cert-manager/cert-manager-5b446d88c5-7hwdt" Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.034378 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvdk6\" (UniqueName: \"kubernetes.io/projected/1ee1742c-1268-40ba-a472-ca0184dd5fae-kube-api-access-nvdk6\") pod \"cert-manager-webhook-5655c58dd6-k9tw2\" (UID: \"1ee1742c-1268-40ba-a472-ca0184dd5fae\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.035979 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr82g\" (UniqueName: \"kubernetes.io/projected/54039121-127f-4ce7-b6d9-3dff080bc6ae-kube-api-access-zr82g\") pod \"cert-manager-5b446d88c5-7hwdt\" (UID: \"54039121-127f-4ce7-b6d9-3dff080bc6ae\") " pod="cert-manager/cert-manager-5b446d88c5-7hwdt" Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.088022 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-s7jbr" Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.127520 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-7hwdt" Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.139670 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.520301 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-k9tw2"] Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.536406 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-s7jbr"] Dec 01 18:43:44 crc kubenswrapper[4935]: I1201 18:43:44.715721 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7hwdt"] Dec 01 18:43:45 crc kubenswrapper[4935]: I1201 18:43:45.278615 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" event={"ID":"1ee1742c-1268-40ba-a472-ca0184dd5fae","Type":"ContainerStarted","Data":"7f954c00d646c6923331bf99626537f96308c19fb2f9d07d80e24dfe45fec0c5"} Dec 01 18:43:45 crc kubenswrapper[4935]: I1201 18:43:45.280312 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-s7jbr" event={"ID":"4fba61c9-f590-402c-a1b1-2a861a4b1bb6","Type":"ContainerStarted","Data":"eb77f8f3380341dd4ac0fc78b8b2abb2843ca3dc0b9d22c359a1760c079e3252"} Dec 01 18:43:45 crc kubenswrapper[4935]: I1201 18:43:45.281408 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-7hwdt" event={"ID":"54039121-127f-4ce7-b6d9-3dff080bc6ae","Type":"ContainerStarted","Data":"a0dc1388128c0e7e66adf836ec06a532fe6e4947ebc9925f1940a90d161ef6bb"} Dec 01 18:43:47 crc kubenswrapper[4935]: I1201 18:43:47.294847 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" event={"ID":"c4d2ca33-292e-45b0-b5b0-972516b76b0d","Type":"ContainerStarted","Data":"3554860827bc3517e6e5157b49172e4b192a45bb3880b6bc69272035b5dd5768"} Dec 01 18:43:48 crc kubenswrapper[4935]: I1201 18:43:48.321702 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-4fc9p" podStartSLOduration=11.621276291000001 podStartE2EDuration="49.321679866s" podCreationTimestamp="2025-12-01 18:42:59 +0000 UTC" firstStartedPulling="2025-12-01 18:43:00.528253747 +0000 UTC m=+794.549883006" lastFinishedPulling="2025-12-01 18:43:38.228657302 +0000 UTC m=+832.250286581" observedRunningTime="2025-12-01 18:43:48.319462776 +0000 UTC m=+842.341092035" watchObservedRunningTime="2025-12-01 18:43:48.321679866 +0000 UTC m=+842.343309135" Dec 01 18:43:57 crc kubenswrapper[4935]: I1201 18:43:57.372803 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" event={"ID":"1ee1742c-1268-40ba-a472-ca0184dd5fae","Type":"ContainerStarted","Data":"442cdc5c51912dd0506548b38aa20effcf68075a6c318a9e8f998e15d3a26fd9"} Dec 01 18:43:57 crc kubenswrapper[4935]: I1201 18:43:57.374563 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-s7jbr" event={"ID":"4fba61c9-f590-402c-a1b1-2a861a4b1bb6","Type":"ContainerStarted","Data":"41e2dd6c64b814cb2b8c4318ee7e0ad6a881db2be57d0ab7d2212d60dbbe7319"} Dec 01 18:43:58 crc kubenswrapper[4935]: I1201 18:43:58.395864 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-s7jbr" podStartSLOduration=2.919409177 podStartE2EDuration="15.395848715s" podCreationTimestamp="2025-12-01 18:43:43 +0000 UTC" 
firstStartedPulling="2025-12-01 18:43:44.539932149 +0000 UTC m=+838.561561408" lastFinishedPulling="2025-12-01 18:43:57.016371677 +0000 UTC m=+851.038000946" observedRunningTime="2025-12-01 18:43:58.394060097 +0000 UTC m=+852.415689366" watchObservedRunningTime="2025-12-01 18:43:58.395848715 +0000 UTC m=+852.417477974" Dec 01 18:43:58 crc kubenswrapper[4935]: I1201 18:43:58.418979 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" podStartSLOduration=2.917736022 podStartE2EDuration="15.418958999s" podCreationTimestamp="2025-12-01 18:43:43 +0000 UTC" firstStartedPulling="2025-12-01 18:43:44.520805064 +0000 UTC m=+838.542434323" lastFinishedPulling="2025-12-01 18:43:57.022028031 +0000 UTC m=+851.043657300" observedRunningTime="2025-12-01 18:43:58.415338641 +0000 UTC m=+852.436967890" watchObservedRunningTime="2025-12-01 18:43:58.418958999 +0000 UTC m=+852.440588258" Dec 01 18:43:59 crc kubenswrapper[4935]: I1201 18:43:59.140841 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" Dec 01 18:44:00 crc kubenswrapper[4935]: I1201 18:44:00.398178 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-7hwdt" event={"ID":"54039121-127f-4ce7-b6d9-3dff080bc6ae","Type":"ContainerStarted","Data":"0ded9a9ea4130f724a9b250346bc74bc7827e1eb7970ea8b3a2728823614be65"} Dec 01 18:44:00 crc kubenswrapper[4935]: I1201 18:44:00.418524 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-7hwdt" podStartSLOduration=2.769755431 podStartE2EDuration="17.418505325s" podCreationTimestamp="2025-12-01 18:43:43 +0000 UTC" firstStartedPulling="2025-12-01 18:43:44.721696873 +0000 UTC m=+838.743326122" lastFinishedPulling="2025-12-01 18:43:59.370446727 +0000 UTC m=+853.392076016" observedRunningTime="2025-12-01 18:44:00.414758133 +0000 UTC m=+854.436387392" watchObservedRunningTime="2025-12-01 18:44:00.418505325 +0000 UTC m=+854.440134584" Dec 01 18:44:04 crc kubenswrapper[4935]: I1201 18:44:04.144657 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-k9tw2" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.576623 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj"] Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.578576 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.580951 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.584903 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.584960 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.585130 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5np6\" (UniqueName: \"kubernetes.io/projected/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-kube-api-access-m5np6\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.586261 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj"] Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.685936 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5np6\" (UniqueName: \"kubernetes.io/projected/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-kube-api-access-m5np6\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.686517 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.686773 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.687299 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.687441 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.712593 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5np6\" (UniqueName: \"kubernetes.io/projected/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-kube-api-access-m5np6\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.761427 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796"] Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.762907 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:30 crc kubenswrapper[4935]: I1201 18:44:30.784336 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796"] Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:30.889056 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84bzp\" (UniqueName: \"kubernetes.io/projected/da239c9b-9e88-43ab-8967-36662c93340c-kube-api-access-84bzp\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:30.889108 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:30.889172 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:30.897883 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:30.992521 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:30.992983 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84bzp\" (UniqueName: \"kubernetes.io/projected/da239c9b-9e88-43ab-8967-36662c93340c-kube-api-access-84bzp\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:30.993017 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:30.993508 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:30.993528 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:31.026082 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84bzp\" (UniqueName: \"kubernetes.io/projected/da239c9b-9e88-43ab-8967-36662c93340c-kube-api-access-84bzp\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:31.080469 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:31.760633 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796"] Dec 01 18:44:31 crc kubenswrapper[4935]: I1201 18:44:31.777142 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj"] Dec 01 18:44:31 crc kubenswrapper[4935]: W1201 18:44:31.789304 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1a58ea7_d1ba_46f8_aef1_784fd0b59622.slice/crio-9c7b637456d6271a6e5a70893061ce5fd685b5b4e5f415f110ca4d3149d7f953 WatchSource:0}: Error finding container 9c7b637456d6271a6e5a70893061ce5fd685b5b4e5f415f110ca4d3149d7f953: Status 404 returned error can't find the container with id 9c7b637456d6271a6e5a70893061ce5fd685b5b4e5f415f110ca4d3149d7f953 Dec 01 18:44:32 crc kubenswrapper[4935]: I1201 18:44:32.681529 4935 generic.go:334] "Generic (PLEG): container finished" podID="da239c9b-9e88-43ab-8967-36662c93340c" containerID="39066d34ad2775a837cc1c343899011ba4ba930f640e1b393bdc88ac8f6cfa6b" exitCode=0 Dec 01 18:44:32 crc kubenswrapper[4935]: I1201 18:44:32.682070 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" event={"ID":"da239c9b-9e88-43ab-8967-36662c93340c","Type":"ContainerDied","Data":"39066d34ad2775a837cc1c343899011ba4ba930f640e1b393bdc88ac8f6cfa6b"} Dec 01 18:44:32 crc kubenswrapper[4935]: I1201 18:44:32.682114 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" event={"ID":"da239c9b-9e88-43ab-8967-36662c93340c","Type":"ContainerStarted","Data":"8e068f16e9ba7b0b545f0a5b57719b1573d69224990bf098a085563a491c8d79"} Dec 01 18:44:32 crc kubenswrapper[4935]: I1201 18:44:32.684251 4935 generic.go:334] "Generic (PLEG): container finished" podID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerID="ca140e71262a1d99b6f2e536eba99222e0e88aca082ec7126e765fed5c027001" exitCode=0 Dec 01 18:44:32 crc kubenswrapper[4935]: I1201 18:44:32.684277 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" event={"ID":"e1a58ea7-d1ba-46f8-aef1-784fd0b59622","Type":"ContainerDied","Data":"ca140e71262a1d99b6f2e536eba99222e0e88aca082ec7126e765fed5c027001"} Dec 01 18:44:32 crc kubenswrapper[4935]: I1201 18:44:32.684295 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" event={"ID":"e1a58ea7-d1ba-46f8-aef1-784fd0b59622","Type":"ContainerStarted","Data":"9c7b637456d6271a6e5a70893061ce5fd685b5b4e5f415f110ca4d3149d7f953"} Dec 01 18:44:35 crc kubenswrapper[4935]: I1201 18:44:35.708409 4935 generic.go:334] "Generic (PLEG): container finished" podID="da239c9b-9e88-43ab-8967-36662c93340c" containerID="3a6d2c48a894e0e5e9fdd8f91a59264404c4172ba840d51be247e13fb56b09df" exitCode=0 Dec 01 18:44:35 crc kubenswrapper[4935]: I1201 18:44:35.708621 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" 
event={"ID":"da239c9b-9e88-43ab-8967-36662c93340c","Type":"ContainerDied","Data":"3a6d2c48a894e0e5e9fdd8f91a59264404c4172ba840d51be247e13fb56b09df"} Dec 01 18:44:35 crc kubenswrapper[4935]: I1201 18:44:35.712202 4935 generic.go:334] "Generic (PLEG): container finished" podID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerID="84f785e8724552d407c7d2729c747ade3673736c563b594aacd2d54173261501" exitCode=0 Dec 01 18:44:35 crc kubenswrapper[4935]: I1201 18:44:35.712235 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" event={"ID":"e1a58ea7-d1ba-46f8-aef1-784fd0b59622","Type":"ContainerDied","Data":"84f785e8724552d407c7d2729c747ade3673736c563b594aacd2d54173261501"} Dec 01 18:44:36 crc kubenswrapper[4935]: I1201 18:44:36.721242 4935 generic.go:334] "Generic (PLEG): container finished" podID="da239c9b-9e88-43ab-8967-36662c93340c" containerID="10f1214dfb5020b5e94a3d5aa36afd8f5e920c868fae9a8c361434870f3beab6" exitCode=0 Dec 01 18:44:36 crc kubenswrapper[4935]: I1201 18:44:36.721341 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" event={"ID":"da239c9b-9e88-43ab-8967-36662c93340c","Type":"ContainerDied","Data":"10f1214dfb5020b5e94a3d5aa36afd8f5e920c868fae9a8c361434870f3beab6"} Dec 01 18:44:36 crc kubenswrapper[4935]: I1201 18:44:36.724522 4935 generic.go:334] "Generic (PLEG): container finished" podID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerID="29e23bfef61671c7c821f3a815e826d3754c3797188411a0771bf753e58932c2" exitCode=0 Dec 01 18:44:36 crc kubenswrapper[4935]: I1201 18:44:36.724567 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" event={"ID":"e1a58ea7-d1ba-46f8-aef1-784fd0b59622","Type":"ContainerDied","Data":"29e23bfef61671c7c821f3a815e826d3754c3797188411a0771bf753e58932c2"} Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.030268 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.038196 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.093386 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-bundle\") pod \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.093458 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84bzp\" (UniqueName: \"kubernetes.io/projected/da239c9b-9e88-43ab-8967-36662c93340c-kube-api-access-84bzp\") pod \"da239c9b-9e88-43ab-8967-36662c93340c\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.093474 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-util\") pod \"da239c9b-9e88-43ab-8967-36662c93340c\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.093525 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5np6\" (UniqueName: \"kubernetes.io/projected/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-kube-api-access-m5np6\") pod \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.093565 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-bundle\") pod \"da239c9b-9e88-43ab-8967-36662c93340c\" (UID: \"da239c9b-9e88-43ab-8967-36662c93340c\") " Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.093602 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-util\") pod \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\" (UID: \"e1a58ea7-d1ba-46f8-aef1-784fd0b59622\") " Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.094927 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-bundle" (OuterVolumeSpecName: "bundle") pod "e1a58ea7-d1ba-46f8-aef1-784fd0b59622" (UID: "e1a58ea7-d1ba-46f8-aef1-784fd0b59622"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.095203 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-bundle" (OuterVolumeSpecName: "bundle") pod "da239c9b-9e88-43ab-8967-36662c93340c" (UID: "da239c9b-9e88-43ab-8967-36662c93340c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.102807 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-kube-api-access-m5np6" (OuterVolumeSpecName: "kube-api-access-m5np6") pod "e1a58ea7-d1ba-46f8-aef1-784fd0b59622" (UID: "e1a58ea7-d1ba-46f8-aef1-784fd0b59622"). InnerVolumeSpecName "kube-api-access-m5np6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.102919 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da239c9b-9e88-43ab-8967-36662c93340c-kube-api-access-84bzp" (OuterVolumeSpecName: "kube-api-access-84bzp") pod "da239c9b-9e88-43ab-8967-36662c93340c" (UID: "da239c9b-9e88-43ab-8967-36662c93340c"). InnerVolumeSpecName "kube-api-access-84bzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.104443 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-util" (OuterVolumeSpecName: "util") pod "e1a58ea7-d1ba-46f8-aef1-784fd0b59622" (UID: "e1a58ea7-d1ba-46f8-aef1-784fd0b59622"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.116698 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-util" (OuterVolumeSpecName: "util") pod "da239c9b-9e88-43ab-8967-36662c93340c" (UID: "da239c9b-9e88-43ab-8967-36662c93340c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.195130 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84bzp\" (UniqueName: \"kubernetes.io/projected/da239c9b-9e88-43ab-8967-36662c93340c-kube-api-access-84bzp\") on node \"crc\" DevicePath \"\"" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.195683 4935 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-util\") on node \"crc\" DevicePath \"\"" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.195701 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5np6\" (UniqueName: \"kubernetes.io/projected/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-kube-api-access-m5np6\") on node \"crc\" DevicePath \"\"" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.195712 4935 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/da239c9b-9e88-43ab-8967-36662c93340c-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.195722 4935 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-util\") on node \"crc\" DevicePath \"\"" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.195734 4935 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e1a58ea7-d1ba-46f8-aef1-784fd0b59622-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.743869 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" event={"ID":"da239c9b-9e88-43ab-8967-36662c93340c","Type":"ContainerDied","Data":"8e068f16e9ba7b0b545f0a5b57719b1573d69224990bf098a085563a491c8d79"} Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.744322 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e068f16e9ba7b0b545f0a5b57719b1573d69224990bf098a085563a491c8d79" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.743909 4935 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.748490 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" event={"ID":"e1a58ea7-d1ba-46f8-aef1-784fd0b59622","Type":"ContainerDied","Data":"9c7b637456d6271a6e5a70893061ce5fd685b5b4e5f415f110ca4d3149d7f953"} Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.748541 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c7b637456d6271a6e5a70893061ce5fd685b5b4e5f415f110ca4d3149d7f953" Dec 01 18:44:38 crc kubenswrapper[4935]: I1201 18:44:38.748622 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.424267 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg"] Dec 01 18:44:47 crc kubenswrapper[4935]: E1201 18:44:47.426504 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerName="util" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.426606 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerName="util" Dec 01 18:44:47 crc kubenswrapper[4935]: E1201 18:44:47.426688 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerName="pull" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.426758 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerName="pull" Dec 01 18:44:47 crc kubenswrapper[4935]: E1201 18:44:47.426833 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerName="extract" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.426900 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerName="extract" Dec 01 18:44:47 crc kubenswrapper[4935]: E1201 18:44:47.426974 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da239c9b-9e88-43ab-8967-36662c93340c" containerName="util" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.427051 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="da239c9b-9e88-43ab-8967-36662c93340c" containerName="util" Dec 01 18:44:47 crc kubenswrapper[4935]: E1201 18:44:47.427130 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da239c9b-9e88-43ab-8967-36662c93340c" containerName="extract" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.427235 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="da239c9b-9e88-43ab-8967-36662c93340c" containerName="extract" Dec 01 18:44:47 crc kubenswrapper[4935]: E1201 18:44:47.427311 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da239c9b-9e88-43ab-8967-36662c93340c" containerName="pull" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.427376 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="da239c9b-9e88-43ab-8967-36662c93340c" containerName="pull" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.427577 4935 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="da239c9b-9e88-43ab-8967-36662c93340c" containerName="extract" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.427668 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1a58ea7-d1ba-46f8-aef1-784fd0b59622" containerName="extract" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.428633 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.431539 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.431633 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.431744 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.431967 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.432345 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.436214 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-57m74" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.447100 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg"] Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.541615 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jkqh\" (UniqueName: \"kubernetes.io/projected/7c66dc44-1b72-4e23-9421-8f8495e7af3a-kube-api-access-6jkqh\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.541691 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/7c66dc44-1b72-4e23-9421-8f8495e7af3a-manager-config\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.541725 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7c66dc44-1b72-4e23-9421-8f8495e7af3a-apiservice-cert\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.541764 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/7c66dc44-1b72-4e23-9421-8f8495e7af3a-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.541781 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7c66dc44-1b72-4e23-9421-8f8495e7af3a-webhook-cert\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.642664 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/7c66dc44-1b72-4e23-9421-8f8495e7af3a-manager-config\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.642720 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7c66dc44-1b72-4e23-9421-8f8495e7af3a-apiservice-cert\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.642757 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7c66dc44-1b72-4e23-9421-8f8495e7af3a-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.642776 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7c66dc44-1b72-4e23-9421-8f8495e7af3a-webhook-cert\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.642822 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jkqh\" (UniqueName: \"kubernetes.io/projected/7c66dc44-1b72-4e23-9421-8f8495e7af3a-kube-api-access-6jkqh\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.643950 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/7c66dc44-1b72-4e23-9421-8f8495e7af3a-manager-config\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.649535 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7c66dc44-1b72-4e23-9421-8f8495e7af3a-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.651890 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7c66dc44-1b72-4e23-9421-8f8495e7af3a-apiservice-cert\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.669516 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7c66dc44-1b72-4e23-9421-8f8495e7af3a-webhook-cert\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.669808 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jkqh\" (UniqueName: \"kubernetes.io/projected/7c66dc44-1b72-4e23-9421-8f8495e7af3a-kube-api-access-6jkqh\") pod \"loki-operator-controller-manager-59df859d46-rd5lg\" (UID: \"7c66dc44-1b72-4e23-9421-8f8495e7af3a\") " pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.746110 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:44:47 crc kubenswrapper[4935]: I1201 18:44:47.970382 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg"] Dec 01 18:44:47 crc kubenswrapper[4935]: W1201 18:44:47.980611 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c66dc44_1b72_4e23_9421_8f8495e7af3a.slice/crio-7426eac540c2d2632c9ffa68873f8f39cfa39597d9f2a8a3b8df896c8a49082d WatchSource:0}: Error finding container 7426eac540c2d2632c9ffa68873f8f39cfa39597d9f2a8a3b8df896c8a49082d: Status 404 returned error can't find the container with id 7426eac540c2d2632c9ffa68873f8f39cfa39597d9f2a8a3b8df896c8a49082d Dec 01 18:44:48 crc kubenswrapper[4935]: I1201 18:44:48.840319 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" event={"ID":"7c66dc44-1b72-4e23-9421-8f8495e7af3a","Type":"ContainerStarted","Data":"7426eac540c2d2632c9ffa68873f8f39cfa39597d9f2a8a3b8df896c8a49082d"} Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.483667 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-92jqh"] Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.485403 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-92jqh" Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.487832 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"openshift-service-ca.crt" Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.488690 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"cluster-logging-operator-dockercfg-zbmdj" Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.489895 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"kube-root-ca.crt" Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.492725 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-92jqh"] Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.608091 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz89l\" (UniqueName: \"kubernetes.io/projected/f8e2ad20-1223-4cd9-bfe7-72bbc774226f-kube-api-access-rz89l\") pod \"cluster-logging-operator-ff9846bd-92jqh\" (UID: \"f8e2ad20-1223-4cd9-bfe7-72bbc774226f\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-92jqh" Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.709232 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz89l\" (UniqueName: \"kubernetes.io/projected/f8e2ad20-1223-4cd9-bfe7-72bbc774226f-kube-api-access-rz89l\") pod \"cluster-logging-operator-ff9846bd-92jqh\" (UID: \"f8e2ad20-1223-4cd9-bfe7-72bbc774226f\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-92jqh" Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.730025 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz89l\" (UniqueName: \"kubernetes.io/projected/f8e2ad20-1223-4cd9-bfe7-72bbc774226f-kube-api-access-rz89l\") pod \"cluster-logging-operator-ff9846bd-92jqh\" (UID: \"f8e2ad20-1223-4cd9-bfe7-72bbc774226f\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-92jqh" Dec 01 18:44:51 crc kubenswrapper[4935]: I1201 18:44:51.804791 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-92jqh" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.536882 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lwjzx"] Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.538010 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.561117 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lwjzx"] Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.623514 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fxws\" (UniqueName: \"kubernetes.io/projected/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-kube-api-access-6fxws\") pod \"community-operators-lwjzx\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.623557 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-utilities\") pod \"community-operators-lwjzx\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.623631 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-catalog-content\") pod \"community-operators-lwjzx\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.724767 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fxws\" (UniqueName: \"kubernetes.io/projected/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-kube-api-access-6fxws\") pod \"community-operators-lwjzx\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.724821 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-utilities\") pod \"community-operators-lwjzx\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.724880 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-catalog-content\") pod \"community-operators-lwjzx\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.725409 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-catalog-content\") pod \"community-operators-lwjzx\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.725984 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-utilities\") pod \"community-operators-lwjzx\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.751793 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6fxws\" (UniqueName: \"kubernetes.io/projected/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-kube-api-access-6fxws\") pod \"community-operators-lwjzx\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:52 crc kubenswrapper[4935]: I1201 18:44:52.853521 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:44:54 crc kubenswrapper[4935]: I1201 18:44:54.323917 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lwjzx"] Dec 01 18:44:54 crc kubenswrapper[4935]: I1201 18:44:54.482574 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-92jqh"] Dec 01 18:44:54 crc kubenswrapper[4935]: I1201 18:44:54.891824 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" event={"ID":"7c66dc44-1b72-4e23-9421-8f8495e7af3a","Type":"ContainerStarted","Data":"639d502bd0f84ec1b05ee8051e002ab16f74575476bc08f5d1bfb2f953bea5f6"} Dec 01 18:44:54 crc kubenswrapper[4935]: I1201 18:44:54.894802 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-92jqh" event={"ID":"f8e2ad20-1223-4cd9-bfe7-72bbc774226f","Type":"ContainerStarted","Data":"1d45b960d4d84838af04f270ded4839ef2541a71d32abf0108f3d2e53f8c5d9f"} Dec 01 18:44:54 crc kubenswrapper[4935]: I1201 18:44:54.896556 4935 generic.go:334] "Generic (PLEG): container finished" podID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerID="bad49698878e622bec5932ca4ae406af500c2cb86b4810b43f541a85b43b5c22" exitCode=0 Dec 01 18:44:54 crc kubenswrapper[4935]: I1201 18:44:54.896596 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwjzx" event={"ID":"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c","Type":"ContainerDied","Data":"bad49698878e622bec5932ca4ae406af500c2cb86b4810b43f541a85b43b5c22"} Dec 01 18:44:54 crc kubenswrapper[4935]: I1201 18:44:54.896616 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwjzx" event={"ID":"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c","Type":"ContainerStarted","Data":"92b43ad6b4a48786621ec10ccbc9be65f33067cfaa543c87e3f236f8d8ed284b"} Dec 01 18:44:55 crc kubenswrapper[4935]: I1201 18:44:55.908335 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwjzx" event={"ID":"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c","Type":"ContainerStarted","Data":"a4c0c38126834d6d28da174cbdb448bdf43d91a8f40842777f78ff9429f75fbc"} Dec 01 18:44:56 crc kubenswrapper[4935]: I1201 18:44:56.923865 4935 generic.go:334] "Generic (PLEG): container finished" podID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerID="a4c0c38126834d6d28da174cbdb448bdf43d91a8f40842777f78ff9429f75fbc" exitCode=0 Dec 01 18:44:56 crc kubenswrapper[4935]: I1201 18:44:56.923929 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwjzx" event={"ID":"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c","Type":"ContainerDied","Data":"a4c0c38126834d6d28da174cbdb448bdf43d91a8f40842777f78ff9429f75fbc"} Dec 01 18:44:57 crc kubenswrapper[4935]: I1201 18:44:57.933896 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwjzx" 
event={"ID":"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c","Type":"ContainerStarted","Data":"cbb0ac4da44f4bc8eab8622cd4bc28486b44c3e56b84ce4ad6f8994099f2b0ec"} Dec 01 18:44:57 crc kubenswrapper[4935]: I1201 18:44:57.959114 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lwjzx" podStartSLOduration=3.341259144 podStartE2EDuration="5.959094844s" podCreationTimestamp="2025-12-01 18:44:52 +0000 UTC" firstStartedPulling="2025-12-01 18:44:54.898251118 +0000 UTC m=+908.919880407" lastFinishedPulling="2025-12-01 18:44:57.516086858 +0000 UTC m=+911.537716107" observedRunningTime="2025-12-01 18:44:57.950581574 +0000 UTC m=+911.972210833" watchObservedRunningTime="2025-12-01 18:44:57.959094844 +0000 UTC m=+911.980724103" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.148118 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg"] Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.149972 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.156796 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg"] Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.161597 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.161619 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.243908 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8nld\" (UniqueName: \"kubernetes.io/projected/c0b46824-03b2-40d6-b1bf-31efd83aaf80-kube-api-access-z8nld\") pod \"collect-profiles-29410245-fgncg\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.243958 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0b46824-03b2-40d6-b1bf-31efd83aaf80-config-volume\") pod \"collect-profiles-29410245-fgncg\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.244031 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0b46824-03b2-40d6-b1bf-31efd83aaf80-secret-volume\") pod \"collect-profiles-29410245-fgncg\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.346179 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0b46824-03b2-40d6-b1bf-31efd83aaf80-secret-volume\") pod \"collect-profiles-29410245-fgncg\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc 
kubenswrapper[4935]: I1201 18:45:00.346277 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8nld\" (UniqueName: \"kubernetes.io/projected/c0b46824-03b2-40d6-b1bf-31efd83aaf80-kube-api-access-z8nld\") pod \"collect-profiles-29410245-fgncg\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.346679 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0b46824-03b2-40d6-b1bf-31efd83aaf80-config-volume\") pod \"collect-profiles-29410245-fgncg\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.347613 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0b46824-03b2-40d6-b1bf-31efd83aaf80-config-volume\") pod \"collect-profiles-29410245-fgncg\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.367991 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0b46824-03b2-40d6-b1bf-31efd83aaf80-secret-volume\") pod \"collect-profiles-29410245-fgncg\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.373629 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8nld\" (UniqueName: \"kubernetes.io/projected/c0b46824-03b2-40d6-b1bf-31efd83aaf80-kube-api-access-z8nld\") pod \"collect-profiles-29410245-fgncg\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:00 crc kubenswrapper[4935]: I1201 18:45:00.480667 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:02 crc kubenswrapper[4935]: I1201 18:45:02.854021 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:45:02 crc kubenswrapper[4935]: I1201 18:45:02.854327 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:45:02 crc kubenswrapper[4935]: I1201 18:45:02.906783 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:45:03 crc kubenswrapper[4935]: I1201 18:45:03.026805 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:45:04 crc kubenswrapper[4935]: I1201 18:45:04.719874 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg"] Dec 01 18:45:04 crc kubenswrapper[4935]: I1201 18:45:04.980518 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" event={"ID":"c0b46824-03b2-40d6-b1bf-31efd83aaf80","Type":"ContainerStarted","Data":"63fe31c853dc4de430febf9463ea9fe37c2b4f6b9b4e2cee0233fbaecc3ca5dd"} Dec 01 18:45:04 crc kubenswrapper[4935]: I1201 18:45:04.980894 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" event={"ID":"c0b46824-03b2-40d6-b1bf-31efd83aaf80","Type":"ContainerStarted","Data":"e81f774c18aacd1851511c2801d8fc83df7ce6f7e3751be08669906665789fb7"} Dec 01 18:45:04 crc kubenswrapper[4935]: I1201 18:45:04.982101 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-92jqh" event={"ID":"f8e2ad20-1223-4cd9-bfe7-72bbc774226f","Type":"ContainerStarted","Data":"d8beea207c7cbe72cd31cc22a0e0da7d0271dd353ccde9d32036c936c10adeff"} Dec 01 18:45:04 crc kubenswrapper[4935]: I1201 18:45:04.984721 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" event={"ID":"7c66dc44-1b72-4e23-9421-8f8495e7af3a","Type":"ContainerStarted","Data":"a189d330b3ae1b1c7fcc328b19914049fed384c3ad66dc822a51033022327851"} Dec 01 18:45:04 crc kubenswrapper[4935]: I1201 18:45:04.984987 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:45:04 crc kubenswrapper[4935]: I1201 18:45:04.987225 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" Dec 01 18:45:04 crc kubenswrapper[4935]: I1201 18:45:04.997820 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" podStartSLOduration=4.997802078 podStartE2EDuration="4.997802078s" podCreationTimestamp="2025-12-01 18:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:45:04.995232586 +0000 UTC m=+919.016861845" watchObservedRunningTime="2025-12-01 18:45:04.997802078 +0000 UTC m=+919.019431337" Dec 01 18:45:05 crc kubenswrapper[4935]: I1201 18:45:05.028394 4935 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-59df859d46-rd5lg" podStartSLOduration=1.807901621 podStartE2EDuration="18.028372166s" podCreationTimestamp="2025-12-01 18:44:47 +0000 UTC" firstStartedPulling="2025-12-01 18:44:47.982845021 +0000 UTC m=+902.004474290" lastFinishedPulling="2025-12-01 18:45:04.203315576 +0000 UTC m=+918.224944835" observedRunningTime="2025-12-01 18:45:05.019842837 +0000 UTC m=+919.041472106" watchObservedRunningTime="2025-12-01 18:45:05.028372166 +0000 UTC m=+919.050001435" Dec 01 18:45:05 crc kubenswrapper[4935]: I1201 18:45:05.043706 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/cluster-logging-operator-ff9846bd-92jqh" podStartSLOduration=4.398662462 podStartE2EDuration="14.043690502s" podCreationTimestamp="2025-12-01 18:44:51 +0000 UTC" firstStartedPulling="2025-12-01 18:44:54.495304141 +0000 UTC m=+908.516933390" lastFinishedPulling="2025-12-01 18:45:04.140332161 +0000 UTC m=+918.161961430" observedRunningTime="2025-12-01 18:45:05.041374129 +0000 UTC m=+919.063003388" watchObservedRunningTime="2025-12-01 18:45:05.043690502 +0000 UTC m=+919.065319761" Dec 01 18:45:05 crc kubenswrapper[4935]: I1201 18:45:05.992324 4935 generic.go:334] "Generic (PLEG): container finished" podID="c0b46824-03b2-40d6-b1bf-31efd83aaf80" containerID="63fe31c853dc4de430febf9463ea9fe37c2b4f6b9b4e2cee0233fbaecc3ca5dd" exitCode=0 Dec 01 18:45:05 crc kubenswrapper[4935]: I1201 18:45:05.992397 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" event={"ID":"c0b46824-03b2-40d6-b1bf-31efd83aaf80","Type":"ContainerDied","Data":"63fe31c853dc4de430febf9463ea9fe37c2b4f6b9b4e2cee0233fbaecc3ca5dd"} Dec 01 18:45:06 crc kubenswrapper[4935]: I1201 18:45:06.528333 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lwjzx"] Dec 01 18:45:06 crc kubenswrapper[4935]: I1201 18:45:06.528619 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lwjzx" podUID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerName="registry-server" containerID="cri-o://cbb0ac4da44f4bc8eab8622cd4bc28486b44c3e56b84ce4ad6f8994099f2b0ec" gracePeriod=2 Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.310625 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.462564 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0b46824-03b2-40d6-b1bf-31efd83aaf80-config-volume\") pod \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.462640 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0b46824-03b2-40d6-b1bf-31efd83aaf80-secret-volume\") pod \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.462786 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8nld\" (UniqueName: \"kubernetes.io/projected/c0b46824-03b2-40d6-b1bf-31efd83aaf80-kube-api-access-z8nld\") pod \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\" (UID: \"c0b46824-03b2-40d6-b1bf-31efd83aaf80\") " Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.463419 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0b46824-03b2-40d6-b1bf-31efd83aaf80-config-volume" (OuterVolumeSpecName: "config-volume") pod "c0b46824-03b2-40d6-b1bf-31efd83aaf80" (UID: "c0b46824-03b2-40d6-b1bf-31efd83aaf80"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.468715 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0b46824-03b2-40d6-b1bf-31efd83aaf80-kube-api-access-z8nld" (OuterVolumeSpecName: "kube-api-access-z8nld") pod "c0b46824-03b2-40d6-b1bf-31efd83aaf80" (UID: "c0b46824-03b2-40d6-b1bf-31efd83aaf80"). InnerVolumeSpecName "kube-api-access-z8nld". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.469327 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0b46824-03b2-40d6-b1bf-31efd83aaf80-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c0b46824-03b2-40d6-b1bf-31efd83aaf80" (UID: "c0b46824-03b2-40d6-b1bf-31efd83aaf80"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.565036 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8nld\" (UniqueName: \"kubernetes.io/projected/c0b46824-03b2-40d6-b1bf-31efd83aaf80-kube-api-access-z8nld\") on node \"crc\" DevicePath \"\"" Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.565081 4935 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c0b46824-03b2-40d6-b1bf-31efd83aaf80-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 18:45:07 crc kubenswrapper[4935]: I1201 18:45:07.565096 4935 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c0b46824-03b2-40d6-b1bf-31efd83aaf80-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 18:45:08 crc kubenswrapper[4935]: I1201 18:45:08.007577 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" event={"ID":"c0b46824-03b2-40d6-b1bf-31efd83aaf80","Type":"ContainerDied","Data":"e81f774c18aacd1851511c2801d8fc83df7ce6f7e3751be08669906665789fb7"} Dec 01 18:45:08 crc kubenswrapper[4935]: I1201 18:45:08.007634 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e81f774c18aacd1851511c2801d8fc83df7ce6f7e3751be08669906665789fb7" Dec 01 18:45:08 crc kubenswrapper[4935]: I1201 18:45:08.007605 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg" Dec 01 18:45:08 crc kubenswrapper[4935]: I1201 18:45:08.010440 4935 generic.go:334] "Generic (PLEG): container finished" podID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerID="cbb0ac4da44f4bc8eab8622cd4bc28486b44c3e56b84ce4ad6f8994099f2b0ec" exitCode=0 Dec 01 18:45:08 crc kubenswrapper[4935]: I1201 18:45:08.010469 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwjzx" event={"ID":"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c","Type":"ContainerDied","Data":"cbb0ac4da44f4bc8eab8622cd4bc28486b44c3e56b84ce4ad6f8994099f2b0ec"} Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.738329 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Dec 01 18:45:09 crc kubenswrapper[4935]: E1201 18:45:09.739314 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0b46824-03b2-40d6-b1bf-31efd83aaf80" containerName="collect-profiles" Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.739347 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0b46824-03b2-40d6-b1bf-31efd83aaf80" containerName="collect-profiles" Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.739626 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0b46824-03b2-40d6-b1bf-31efd83aaf80" containerName="collect-profiles" Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.740528 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="minio-dev/minio" Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.742948 4935 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-xk6tm" Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.744387 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.744803 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.761789 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.899268 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f81f5bd1-6263-480d-85d0-3b8b4e225c8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f81f5bd1-6263-480d-85d0-3b8b4e225c8e\") pod \"minio\" (UID: \"16f0a03b-8c6d-4311-b5f4-56f51600270b\") " pod="minio-dev/minio" Dec 01 18:45:09 crc kubenswrapper[4935]: I1201 18:45:09.899435 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgn4k\" (UniqueName: \"kubernetes.io/projected/16f0a03b-8c6d-4311-b5f4-56f51600270b-kube-api-access-tgn4k\") pod \"minio\" (UID: \"16f0a03b-8c6d-4311-b5f4-56f51600270b\") " pod="minio-dev/minio" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.001171 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgn4k\" (UniqueName: \"kubernetes.io/projected/16f0a03b-8c6d-4311-b5f4-56f51600270b-kube-api-access-tgn4k\") pod \"minio\" (UID: \"16f0a03b-8c6d-4311-b5f4-56f51600270b\") " pod="minio-dev/minio" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.001603 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f81f5bd1-6263-480d-85d0-3b8b4e225c8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f81f5bd1-6263-480d-85d0-3b8b4e225c8e\") pod \"minio\" (UID: \"16f0a03b-8c6d-4311-b5f4-56f51600270b\") " pod="minio-dev/minio" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.006196 4935 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.006307 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f81f5bd1-6263-480d-85d0-3b8b4e225c8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f81f5bd1-6263-480d-85d0-3b8b4e225c8e\") pod \"minio\" (UID: \"16f0a03b-8c6d-4311-b5f4-56f51600270b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3dd415f8acbedd39dcf8790054b3efd17efc70fda81b87518746060ed6d44ecf/globalmount\"" pod="minio-dev/minio" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.038811 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgn4k\" (UniqueName: \"kubernetes.io/projected/16f0a03b-8c6d-4311-b5f4-56f51600270b-kube-api-access-tgn4k\") pod \"minio\" (UID: \"16f0a03b-8c6d-4311-b5f4-56f51600270b\") " pod="minio-dev/minio" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.058762 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f81f5bd1-6263-480d-85d0-3b8b4e225c8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f81f5bd1-6263-480d-85d0-3b8b4e225c8e\") pod \"minio\" (UID: \"16f0a03b-8c6d-4311-b5f4-56f51600270b\") " pod="minio-dev/minio" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.087836 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.178638 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.305492 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-catalog-content\") pod \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.305779 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-utilities\") pod \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.305870 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fxws\" (UniqueName: \"kubernetes.io/projected/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-kube-api-access-6fxws\") pod \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\" (UID: \"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c\") " Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.308005 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-utilities" (OuterVolumeSpecName: "utilities") pod "86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" (UID: "86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.310154 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-kube-api-access-6fxws" (OuterVolumeSpecName: "kube-api-access-6fxws") pod "86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" (UID: "86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c"). InnerVolumeSpecName "kube-api-access-6fxws". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.373447 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" (UID: "86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.407799 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fxws\" (UniqueName: \"kubernetes.io/projected/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-kube-api-access-6fxws\") on node \"crc\" DevicePath \"\"" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.407842 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.407852 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:45:10 crc kubenswrapper[4935]: I1201 18:45:10.539676 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 01 18:45:10 crc kubenswrapper[4935]: W1201 18:45:10.563397 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16f0a03b_8c6d_4311_b5f4_56f51600270b.slice/crio-d7b51579453621bd53bf8adee13208566e2ac2e8852260d803cf978bec59a411 WatchSource:0}: Error finding container d7b51579453621bd53bf8adee13208566e2ac2e8852260d803cf978bec59a411: Status 404 returned error can't find the container with id d7b51579453621bd53bf8adee13208566e2ac2e8852260d803cf978bec59a411 Dec 01 18:45:11 crc kubenswrapper[4935]: I1201 18:45:11.031564 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"16f0a03b-8c6d-4311-b5f4-56f51600270b","Type":"ContainerStarted","Data":"d7b51579453621bd53bf8adee13208566e2ac2e8852260d803cf978bec59a411"} Dec 01 18:45:11 crc kubenswrapper[4935]: I1201 18:45:11.033218 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lwjzx" event={"ID":"86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c","Type":"ContainerDied","Data":"92b43ad6b4a48786621ec10ccbc9be65f33067cfaa543c87e3f236f8d8ed284b"} Dec 01 18:45:11 crc kubenswrapper[4935]: I1201 18:45:11.033269 4935 scope.go:117] "RemoveContainer" containerID="cbb0ac4da44f4bc8eab8622cd4bc28486b44c3e56b84ce4ad6f8994099f2b0ec" Dec 01 18:45:11 crc kubenswrapper[4935]: I1201 18:45:11.033332 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lwjzx" Dec 01 18:45:11 crc kubenswrapper[4935]: I1201 18:45:11.057446 4935 scope.go:117] "RemoveContainer" containerID="a4c0c38126834d6d28da174cbdb448bdf43d91a8f40842777f78ff9429f75fbc" Dec 01 18:45:11 crc kubenswrapper[4935]: I1201 18:45:11.070208 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lwjzx"] Dec 01 18:45:11 crc kubenswrapper[4935]: I1201 18:45:11.075526 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lwjzx"] Dec 01 18:45:11 crc kubenswrapper[4935]: I1201 18:45:11.080218 4935 scope.go:117] "RemoveContainer" containerID="bad49698878e622bec5932ca4ae406af500c2cb86b4810b43f541a85b43b5c22" Dec 01 18:45:12 crc kubenswrapper[4935]: I1201 18:45:12.533823 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" path="/var/lib/kubelet/pods/86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c/volumes" Dec 01 18:45:15 crc kubenswrapper[4935]: I1201 18:45:15.074512 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"16f0a03b-8c6d-4311-b5f4-56f51600270b","Type":"ContainerStarted","Data":"64724d23b0448126df6bbff6e7b5b5737fe8842e1d5f168b00f2d4c7b509ab03"} Dec 01 18:45:15 crc kubenswrapper[4935]: I1201 18:45:15.096066 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=4.763618987 podStartE2EDuration="8.096035487s" podCreationTimestamp="2025-12-01 18:45:07 +0000 UTC" firstStartedPulling="2025-12-01 18:45:10.572105547 +0000 UTC m=+924.593734826" lastFinishedPulling="2025-12-01 18:45:13.904522067 +0000 UTC m=+927.926151326" observedRunningTime="2025-12-01 18:45:15.094783538 +0000 UTC m=+929.116412797" watchObservedRunningTime="2025-12-01 18:45:15.096035487 +0000 UTC m=+929.117664786" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.163073 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb"] Dec 01 18:45:19 crc kubenswrapper[4935]: E1201 18:45:19.163988 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerName="extract-utilities" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.164003 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerName="extract-utilities" Dec 01 18:45:19 crc kubenswrapper[4935]: E1201 18:45:19.164018 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerName="extract-content" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.164026 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerName="extract-content" Dec 01 18:45:19 crc kubenswrapper[4935]: E1201 18:45:19.164044 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerName="registry-server" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.164051 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerName="registry-server" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.164245 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="86ae5bf1-96aa-4ba2-9a3f-a9b163d4123c" containerName="registry-server" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 
18:45:19.164795 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.168165 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-ca-bundle" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.168233 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-dockercfg-rdmnn" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.168271 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-http" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.168988 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-grpc" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.177618 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-config" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.194545 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb"] Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.250557 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/4c7e5318-7492-4aee-9738-c02c693a1ccd-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.250640 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4c7e5318-7492-4aee-9738-c02c693a1ccd-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.250677 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/4c7e5318-7492-4aee-9738-c02c693a1ccd-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.250717 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzz7j\" (UniqueName: \"kubernetes.io/projected/4c7e5318-7492-4aee-9738-c02c693a1ccd-kube-api-access-wzz7j\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.250803 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c7e5318-7492-4aee-9738-c02c693a1ccd-config\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc 
kubenswrapper[4935]: I1201 18:45:19.333406 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-hkjzj"] Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.334473 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.337324 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-http" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.337743 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-grpc" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.337762 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-s3" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.349712 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-hkjzj"] Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.351783 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c7e5318-7492-4aee-9738-c02c693a1ccd-config\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.351873 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/4c7e5318-7492-4aee-9738-c02c693a1ccd-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.351916 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4c7e5318-7492-4aee-9738-c02c693a1ccd-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.351939 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/4c7e5318-7492-4aee-9738-c02c693a1ccd-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.351969 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzz7j\" (UniqueName: \"kubernetes.io/projected/4c7e5318-7492-4aee-9738-c02c693a1ccd-kube-api-access-wzz7j\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.353049 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c7e5318-7492-4aee-9738-c02c693a1ccd-config\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: 
\"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.353049 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4c7e5318-7492-4aee-9738-c02c693a1ccd-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.362320 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/4c7e5318-7492-4aee-9738-c02c693a1ccd-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.379820 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/4c7e5318-7492-4aee-9738-c02c693a1ccd-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.393863 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzz7j\" (UniqueName: \"kubernetes.io/projected/4c7e5318-7492-4aee-9738-c02c693a1ccd-kube-api-access-wzz7j\") pod \"logging-loki-distributor-76cc67bf56-rvtlb\" (UID: \"4c7e5318-7492-4aee-9738-c02c693a1ccd\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.453478 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bde6947-75e2-4f05-b403-f010444ce0b8-config\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.453823 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.453873 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxzdg\" (UniqueName: \"kubernetes.io/projected/5bde6947-75e2-4f05-b403-f010444ce0b8-kube-api-access-zxzdg\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.453902 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " 
pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.453917 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.453966 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.491553 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.491919 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2"] Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.497038 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.498751 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-http" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.499096 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-grpc" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.503755 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2"] Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555551 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555606 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555672 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-config\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555731 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxzdg\" (UniqueName: \"kubernetes.io/projected/5bde6947-75e2-4f05-b403-f010444ce0b8-kube-api-access-zxzdg\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555753 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555779 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgtck\" (UniqueName: \"kubernetes.io/projected/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-kube-api-access-bgtck\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555812 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555859 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555924 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555954 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.555977 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bde6947-75e2-4f05-b403-f010444ce0b8-config\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.557195 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bde6947-75e2-4f05-b403-f010444ce0b8-config\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.560320 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.565368 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.570587 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.570889 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/5bde6947-75e2-4f05-b403-f010444ce0b8-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.579465 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxzdg\" (UniqueName: \"kubernetes.io/projected/5bde6947-75e2-4f05-b403-f010444ce0b8-kube-api-access-zxzdg\") pod \"logging-loki-querier-5895d59bb8-hkjzj\" (UID: \"5bde6947-75e2-4f05-b403-f010444ce0b8\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.613590 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-6bd679557b-fq7qm"] Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.614949 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.629758 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-6bd679557b-vs7f7"] Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.631011 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.632751 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-client-http" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.633074 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.633200 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.633254 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-http" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.633303 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway-ca-bundle" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.636781 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-dockercfg-6x7sr" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.648127 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-6bd679557b-fq7qm"] Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.657913 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.658223 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-config\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.658365 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.658457 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgtck\" (UniqueName: \"kubernetes.io/projected/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-kube-api-access-bgtck\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.658580 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " 
pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.660734 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.661378 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-config\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.667097 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.669214 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.700018 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.701639 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-6bd679557b-vs7f7"] Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.702998 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgtck\" (UniqueName: \"kubernetes.io/projected/7f489c6c-5824-4f46-8bda-7363d4b1d1e4-kube-api-access-bgtck\") pod \"logging-loki-query-frontend-84558f7c9f-jl8x2\" (UID: \"7f489c6c-5824-4f46-8bda-7363d4b1d1e4\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.759677 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-lokistack-gateway\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761172 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9vkw\" (UniqueName: \"kubernetes.io/projected/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-kube-api-access-x9vkw\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc 
kubenswrapper[4935]: I1201 18:45:19.761211 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-logging-loki-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761229 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761378 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-tenants\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761439 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-rbac\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761485 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-tls-secret\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761547 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-logging-loki-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761598 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-lokistack-gateway\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761623 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 
18:45:19.761648 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761673 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-rbac\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761703 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-tls-secret\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761730 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761758 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-tenants\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.761781 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqhh4\" (UniqueName: \"kubernetes.io/projected/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-kube-api-access-sqhh4\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863522 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-lokistack-gateway\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863585 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863616 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863644 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-rbac\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863670 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-tls-secret\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863704 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863737 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-tenants\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863763 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqhh4\" (UniqueName: \"kubernetes.io/projected/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-kube-api-access-sqhh4\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863789 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-lokistack-gateway\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863812 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9vkw\" (UniqueName: \"kubernetes.io/projected/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-kube-api-access-x9vkw\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863841 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-logging-loki-ca-bundle\") pod 
\"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863862 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863893 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-rbac\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863914 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-tenants\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863958 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-tls-secret\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.863986 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-logging-loki-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.864843 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-lokistack-gateway\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.865171 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-logging-loki-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.865231 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 
18:45:19.865928 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-rbac\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.866253 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.866776 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-lokistack-gateway\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.866915 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-rbac\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.869687 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.870867 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.871033 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-tls-secret\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.871954 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-logging-loki-ca-bundle\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.872031 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-tenants\") pod 
\"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.883112 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9vkw\" (UniqueName: \"kubernetes.io/projected/e4a3ab20-1697-4acf-9d3c-14037e5a78bd-kube-api-access-x9vkw\") pod \"logging-loki-gateway-6bd679557b-fq7qm\" (UID: \"e4a3ab20-1697-4acf-9d3c-14037e5a78bd\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.884087 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqhh4\" (UniqueName: \"kubernetes.io/projected/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-kube-api-access-sqhh4\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.884402 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-tenants\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.889648 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8b4b5c73-8c85-42ec-88f4-9b703996e4c7-tls-secret\") pod \"logging-loki-gateway-6bd679557b-vs7f7\" (UID: \"8b4b5c73-8c85-42ec-88f4-9b703996e4c7\") " pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.899636 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:19 crc kubenswrapper[4935]: I1201 18:45:19.969956 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.003449 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.043563 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.139961 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" event={"ID":"4c7e5318-7492-4aee-9738-c02c693a1ccd","Type":"ContainerStarted","Data":"851d39582d66ddc4156daabc237a5b5c86452e47c071abf112fac8f3ad290e0d"} Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.147081 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-hkjzj"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.327381 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.332784 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.335576 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-http" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.339412 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-grpc" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.356279 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.365436 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-6bd679557b-vs7f7"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.374388 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.456893 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.457783 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: W1201 18:45:20.459439 4935 reflector.go:561] object-"openshift-logging"/"logging-loki-compactor-http": failed to list *v1.Secret: secrets "logging-loki-compactor-http" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-logging": no relationship found between node 'crc' and this object Dec 01 18:45:20 crc kubenswrapper[4935]: E1201 18:45:20.459479 4935 reflector.go:158] "Unhandled Error" err="object-\"openshift-logging\"/\"logging-loki-compactor-http\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"logging-loki-compactor-http\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-logging\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 01 18:45:20 crc kubenswrapper[4935]: W1201 18:45:20.459439 4935 reflector.go:561] object-"openshift-logging"/"logging-loki-compactor-grpc": failed to list *v1.Secret: secrets "logging-loki-compactor-grpc" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-logging": no relationship found between node 'crc' and this object Dec 01 18:45:20 crc kubenswrapper[4935]: E1201 18:45:20.459517 4935 reflector.go:158] "Unhandled Error" err="object-\"openshift-logging\"/\"logging-loki-compactor-grpc\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"logging-loki-compactor-grpc\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-logging\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.467626 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-6bd679557b-fq7qm"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.475601 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ab091c61-5df5-4858-9ee8-689bda097a6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab091c61-5df5-4858-9ee8-689bda097a6b\") pod 
\"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.475660 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b96c12dc-cb12-4f7c-866b-c1cd3b038e78\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b96c12dc-cb12-4f7c-866b-c1cd3b038e78\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.475693 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.475764 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.475786 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.475833 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.475871 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7hds\" (UniqueName: \"kubernetes.io/projected/cc008841-9147-4cd8-894b-e54127c2a4ab-kube-api-access-n7hds\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.475924 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc008841-9147-4cd8-894b-e54127c2a4ab-config\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.518310 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.569701 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.574001 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.575701 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-http" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.575942 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-grpc" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.576947 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.576993 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577034 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-16b2457a-b4dc-402e-8bc9-06b032d08a6f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-16b2457a-b4dc-402e-8bc9-06b032d08a6f\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577073 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47cd7efb-bd80-437c-b921-03dc4d3ee011-config\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577102 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577132 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577175 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577207 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7hds\" (UniqueName: 
\"kubernetes.io/projected/cc008841-9147-4cd8-894b-e54127c2a4ab-kube-api-access-n7hds\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577249 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc008841-9147-4cd8-894b-e54127c2a4ab-config\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577287 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577328 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ab091c61-5df5-4858-9ee8-689bda097a6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab091c61-5df5-4858-9ee8-689bda097a6b\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577375 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b96c12dc-cb12-4f7c-866b-c1cd3b038e78\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b96c12dc-cb12-4f7c-866b-c1cd3b038e78\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577412 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577440 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577470 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7nml\" (UniqueName: \"kubernetes.io/projected/47cd7efb-bd80-437c-b921-03dc4d3ee011-kube-api-access-d7nml\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.577800 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.578984 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc008841-9147-4cd8-894b-e54127c2a4ab-config\") pod 
\"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.579624 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.583983 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.584105 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.584291 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/cc008841-9147-4cd8-894b-e54127c2a4ab-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.585214 4935 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.585248 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ab091c61-5df5-4858-9ee8-689bda097a6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab091c61-5df5-4858-9ee8-689bda097a6b\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f1753317a15a7ca7eaf38fda539e6173ae12ea02173c75f97d57d11df71dcdf5/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.585284 4935 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.585310 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b96c12dc-cb12-4f7c-866b-c1cd3b038e78\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b96c12dc-cb12-4f7c-866b-c1cd3b038e78\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ff6b47a5d8e7ccc967b6d6d3e6e11a20a057bce96f58ce1f3c0ac81fa472cb3a/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.604382 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7hds\" (UniqueName: \"kubernetes.io/projected/cc008841-9147-4cd8-894b-e54127c2a4ab-kube-api-access-n7hds\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.633016 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b96c12dc-cb12-4f7c-866b-c1cd3b038e78\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b96c12dc-cb12-4f7c-866b-c1cd3b038e78\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.639528 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ab091c61-5df5-4858-9ee8-689bda097a6b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab091c61-5df5-4858-9ee8-689bda097a6b\") pod \"logging-loki-ingester-0\" (UID: \"cc008841-9147-4cd8-894b-e54127c2a4ab\") " pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.646661 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678477 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678525 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7nml\" (UniqueName: \"kubernetes.io/projected/47cd7efb-bd80-437c-b921-03dc4d3ee011-kube-api-access-d7nml\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678558 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6ccd3dc3-a558-453b-8222-46b9a2754051\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ccd3dc3-a558-453b-8222-46b9a2754051\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678603 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-16b2457a-b4dc-402e-8bc9-06b032d08a6f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-16b2457a-b4dc-402e-8bc9-06b032d08a6f\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678631 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-config\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678666 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678689 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678711 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678726 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678744 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678760 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678797 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7s5v\" (UniqueName: \"kubernetes.io/projected/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-kube-api-access-z7s5v\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678822 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47cd7efb-bd80-437c-b921-03dc4d3ee011-config\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.678842 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.679661 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.681417 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47cd7efb-bd80-437c-b921-03dc4d3ee011-config\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.683865 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc 
kubenswrapper[4935]: I1201 18:45:20.684266 4935 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.684317 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-16b2457a-b4dc-402e-8bc9-06b032d08a6f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-16b2457a-b4dc-402e-8bc9-06b032d08a6f\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f4cae1dbd46583b0f0622bdfc6e16c667ba8167c2472502fcb73391c8f06eb64/globalmount\"" pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.696706 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7nml\" (UniqueName: \"kubernetes.io/projected/47cd7efb-bd80-437c-b921-03dc4d3ee011-kube-api-access-d7nml\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.722395 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-16b2457a-b4dc-402e-8bc9-06b032d08a6f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-16b2457a-b4dc-402e-8bc9-06b032d08a6f\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.779999 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7s5v\" (UniqueName: \"kubernetes.io/projected/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-kube-api-access-z7s5v\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.780225 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6ccd3dc3-a558-453b-8222-46b9a2754051\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ccd3dc3-a558-453b-8222-46b9a2754051\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.780294 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-config\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.780391 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.780456 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-index-gateway-grpc\") pod 
\"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.780490 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.780526 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.781271 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-config\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.782523 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.784069 4935 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.784125 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6ccd3dc3-a558-453b-8222-46b9a2754051\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ccd3dc3-a558-453b-8222-46b9a2754051\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1fa6ae8533dc835b01457475d8a50161176696b5f88568ef5161e3aaa84cfef6/globalmount\"" pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.785054 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.787033 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.787905 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.801208 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7s5v\" (UniqueName: \"kubernetes.io/projected/23e6d00c-6fc5-4564-b9a4-4357c10cc65e-kube-api-access-z7s5v\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.818665 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6ccd3dc3-a558-453b-8222-46b9a2754051\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6ccd3dc3-a558-453b-8222-46b9a2754051\") pod \"logging-loki-index-gateway-0\" (UID: \"23e6d00c-6fc5-4564-b9a4-4357c10cc65e\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:20 crc kubenswrapper[4935]: I1201 18:45:20.935335 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.084173 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 01 18:45:21 crc kubenswrapper[4935]: W1201 18:45:21.099653 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc008841_9147_4cd8_894b_e54127c2a4ab.slice/crio-ac81de611fa86781a6595f7e8523696c38c53931d6a6f6150472adbf2cf59bab WatchSource:0}: Error finding container ac81de611fa86781a6595f7e8523696c38c53931d6a6f6150472adbf2cf59bab: Status 404 returned error can't find the container with id ac81de611fa86781a6595f7e8523696c38c53931d6a6f6150472adbf2cf59bab Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.148971 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" event={"ID":"8b4b5c73-8c85-42ec-88f4-9b703996e4c7","Type":"ContainerStarted","Data":"ed5531e084d3b1bf6f6d276e198fd08cb62be769c5415f2752d1c114abfb652d"} Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.150369 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" event={"ID":"7f489c6c-5824-4f46-8bda-7363d4b1d1e4","Type":"ContainerStarted","Data":"99bef57b70aa2ad25032fe2c2a4e0abdae267cae1d205b9a79c2ddde8319a2a9"} Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.152073 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" event={"ID":"e4a3ab20-1697-4acf-9d3c-14037e5a78bd","Type":"ContainerStarted","Data":"01baf1bc1d1f83010bceaed0b2990437b1fe3489ab95e793d875ce5a3d86ce66"} Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.156899 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"cc008841-9147-4cd8-894b-e54127c2a4ab","Type":"ContainerStarted","Data":"ac81de611fa86781a6595f7e8523696c38c53931d6a6f6150472adbf2cf59bab"} Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.158214 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" event={"ID":"5bde6947-75e2-4f05-b403-f010444ce0b8","Type":"ContainerStarted","Data":"75dd7f9e88813ae96964fcbd9d5238cfe82ebf9a91ab5c37e1972c88715237ae"} Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.318475 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-http" Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.327026 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.430464 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 01 18:45:21 crc kubenswrapper[4935]: E1201 18:45:21.682024 4935 secret.go:188] Couldn't get secret openshift-logging/logging-loki-compactor-grpc: failed to sync secret cache: timed out waiting for the condition Dec 01 18:45:21 crc kubenswrapper[4935]: E1201 18:45:21.682139 4935 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-compactor-grpc podName:47cd7efb-bd80-437c-b921-03dc4d3ee011 nodeName:}" failed. No retries permitted until 2025-12-01 18:45:22.182110822 +0000 UTC m=+936.203740101 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "logging-loki-compactor-grpc" (UniqueName: "kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-compactor-grpc") pod "logging-loki-compactor-0" (UID: "47cd7efb-bd80-437c-b921-03dc4d3ee011") : failed to sync secret cache: timed out waiting for the condition Dec 01 18:45:21 crc kubenswrapper[4935]: I1201 18:45:21.933904 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-grpc" Dec 01 18:45:22 crc kubenswrapper[4935]: I1201 18:45:22.165829 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"23e6d00c-6fc5-4564-b9a4-4357c10cc65e","Type":"ContainerStarted","Data":"aa8542bc16e6f16c9780ba1dcbdf1fef841fdec0ca65e02ecc4852ca4cd1a4ec"} Dec 01 18:45:22 crc kubenswrapper[4935]: I1201 18:45:22.204951 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:22 crc kubenswrapper[4935]: I1201 18:45:22.208910 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/47cd7efb-bd80-437c-b921-03dc4d3ee011-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"47cd7efb-bd80-437c-b921-03dc4d3ee011\") " pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:22 crc kubenswrapper[4935]: I1201 18:45:22.278767 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:22 crc kubenswrapper[4935]: I1201 18:45:22.705108 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 01 18:45:23 crc kubenswrapper[4935]: W1201 18:45:23.212759 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47cd7efb_bd80_437c_b921_03dc4d3ee011.slice/crio-3414f98f9a8dcb48b7d255b96086f78839af356a43cac73a6933156f07b5442f WatchSource:0}: Error finding container 3414f98f9a8dcb48b7d255b96086f78839af356a43cac73a6933156f07b5442f: Status 404 returned error can't find the container with id 3414f98f9a8dcb48b7d255b96086f78839af356a43cac73a6933156f07b5442f Dec 01 18:45:24 crc kubenswrapper[4935]: I1201 18:45:24.195000 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"47cd7efb-bd80-437c-b921-03dc4d3ee011","Type":"ContainerStarted","Data":"3414f98f9a8dcb48b7d255b96086f78839af356a43cac73a6933156f07b5442f"} Dec 01 18:45:24 crc kubenswrapper[4935]: I1201 18:45:24.345793 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:45:24 crc kubenswrapper[4935]: I1201 18:45:24.345869 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.209509 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" event={"ID":"7f489c6c-5824-4f46-8bda-7363d4b1d1e4","Type":"ContainerStarted","Data":"7ec1c43f5fcd7e65090726ebed69d6014cdd81568e879bbba925ef84b0ad2893"} Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.209975 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.212219 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"23e6d00c-6fc5-4564-b9a4-4357c10cc65e","Type":"ContainerStarted","Data":"417280e470457ac0669f385a5878e20b2b94eaa4d388858098cee6ff96c12ea7"} Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.212394 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.218526 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" event={"ID":"e4a3ab20-1697-4acf-9d3c-14037e5a78bd","Type":"ContainerStarted","Data":"150948f6613a99bdef076b8a854e8a2f700b686f0cefb13a9afebd9f77f0bbf2"} Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.221117 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"cc008841-9147-4cd8-894b-e54127c2a4ab","Type":"ContainerStarted","Data":"420c0ff71ae84072c0e6ab6ebf26ccb4d295b350b3d5c93bd6b360fde79de0e8"} Dec 01 
18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.221296 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.223463 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" event={"ID":"5bde6947-75e2-4f05-b403-f010444ce0b8","Type":"ContainerStarted","Data":"4b748c032ed2d44018adac54b884ab9c317c445e001ba22df9f3325e6a2afc8c"} Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.224405 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.226439 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"47cd7efb-bd80-437c-b921-03dc4d3ee011","Type":"ContainerStarted","Data":"6d52a91a3678f0c89e7f5b98edada0a7004f9073ee59e5d0c383c49405c46ec2"} Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.226494 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.228320 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" event={"ID":"4c7e5318-7492-4aee-9738-c02c693a1ccd","Type":"ContainerStarted","Data":"fad70178589e19f5cee87fe57c2fa41eaae6d6763670e336998593bb7a43ff6d"} Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.228469 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.230281 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" event={"ID":"8b4b5c73-8c85-42ec-88f4-9b703996e4c7","Type":"ContainerStarted","Data":"ef9467c975067d3aa39d0dd0b05bf78112ff38761bea17cfa056eb79028ddb97"} Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.240124 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" podStartSLOduration=2.400954334 podStartE2EDuration="6.240100628s" podCreationTimestamp="2025-12-01 18:45:19 +0000 UTC" firstStartedPulling="2025-12-01 18:45:20.366910863 +0000 UTC m=+934.388540122" lastFinishedPulling="2025-12-01 18:45:24.206057157 +0000 UTC m=+938.227686416" observedRunningTime="2025-12-01 18:45:25.235305817 +0000 UTC m=+939.256935086" watchObservedRunningTime="2025-12-01 18:45:25.240100628 +0000 UTC m=+939.261729927" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.277710 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-compactor-0" podStartSLOduration=5.289611594 podStartE2EDuration="6.277678628s" podCreationTimestamp="2025-12-01 18:45:19 +0000 UTC" firstStartedPulling="2025-12-01 18:45:23.216505826 +0000 UTC m=+937.238135085" lastFinishedPulling="2025-12-01 18:45:24.20457286 +0000 UTC m=+938.226202119" observedRunningTime="2025-12-01 18:45:25.268940722 +0000 UTC m=+939.290570081" watchObservedRunningTime="2025-12-01 18:45:25.277678628 +0000 UTC m=+939.299307927" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.301290 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-index-gateway-0" 
podStartSLOduration=3.5698771689999997 podStartE2EDuration="6.301267666s" podCreationTimestamp="2025-12-01 18:45:19 +0000 UTC" firstStartedPulling="2025-12-01 18:45:21.443220933 +0000 UTC m=+935.464850192" lastFinishedPulling="2025-12-01 18:45:24.17461143 +0000 UTC m=+938.196240689" observedRunningTime="2025-12-01 18:45:25.294852512 +0000 UTC m=+939.316481781" watchObservedRunningTime="2025-12-01 18:45:25.301267666 +0000 UTC m=+939.322896925" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.314923 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" podStartSLOduration=2.351810057 podStartE2EDuration="6.314907618s" podCreationTimestamp="2025-12-01 18:45:19 +0000 UTC" firstStartedPulling="2025-12-01 18:45:20.158864671 +0000 UTC m=+934.180493930" lastFinishedPulling="2025-12-01 18:45:24.121962232 +0000 UTC m=+938.143591491" observedRunningTime="2025-12-01 18:45:25.31434809 +0000 UTC m=+939.335977359" watchObservedRunningTime="2025-12-01 18:45:25.314907618 +0000 UTC m=+939.336536877" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.342799 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-ingester-0" podStartSLOduration=3.1978046 podStartE2EDuration="6.342775571s" podCreationTimestamp="2025-12-01 18:45:19 +0000 UTC" firstStartedPulling="2025-12-01 18:45:21.102211869 +0000 UTC m=+935.123841128" lastFinishedPulling="2025-12-01 18:45:24.24718281 +0000 UTC m=+938.268812099" observedRunningTime="2025-12-01 18:45:25.335985476 +0000 UTC m=+939.357614775" watchObservedRunningTime="2025-12-01 18:45:25.342775571 +0000 UTC m=+939.364404840" Dec 01 18:45:25 crc kubenswrapper[4935]: I1201 18:45:25.361562 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" podStartSLOduration=2.24365003 podStartE2EDuration="6.361544896s" podCreationTimestamp="2025-12-01 18:45:19 +0000 UTC" firstStartedPulling="2025-12-01 18:45:20.072005659 +0000 UTC m=+934.093634918" lastFinishedPulling="2025-12-01 18:45:24.189900525 +0000 UTC m=+938.211529784" observedRunningTime="2025-12-01 18:45:25.353208431 +0000 UTC m=+939.374837690" watchObservedRunningTime="2025-12-01 18:45:25.361544896 +0000 UTC m=+939.383174175" Dec 01 18:45:27 crc kubenswrapper[4935]: I1201 18:45:27.249436 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" event={"ID":"8b4b5c73-8c85-42ec-88f4-9b703996e4c7","Type":"ContainerStarted","Data":"3940e9824668d20065c0ca9ef2edcc71f66d3db0a5b281b3d224dc0104c4f485"} Dec 01 18:45:27 crc kubenswrapper[4935]: I1201 18:45:27.249865 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:27 crc kubenswrapper[4935]: I1201 18:45:27.252251 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" event={"ID":"e4a3ab20-1697-4acf-9d3c-14037e5a78bd","Type":"ContainerStarted","Data":"6df7701aae48e791b87810b450e939af1186822485fd3734828d0afbdeb31034"} Dec 01 18:45:27 crc kubenswrapper[4935]: I1201 18:45:27.252537 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:27 crc kubenswrapper[4935]: I1201 18:45:27.267002 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:27 crc kubenswrapper[4935]: I1201 18:45:27.268093 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:27 crc kubenswrapper[4935]: I1201 18:45:27.278833 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" podStartSLOduration=2.072023622 podStartE2EDuration="8.27881668s" podCreationTimestamp="2025-12-01 18:45:19 +0000 UTC" firstStartedPulling="2025-12-01 18:45:20.362140211 +0000 UTC m=+934.383769460" lastFinishedPulling="2025-12-01 18:45:26.568933259 +0000 UTC m=+940.590562518" observedRunningTime="2025-12-01 18:45:27.272610424 +0000 UTC m=+941.294239693" watchObservedRunningTime="2025-12-01 18:45:27.27881668 +0000 UTC m=+941.300445959" Dec 01 18:45:27 crc kubenswrapper[4935]: I1201 18:45:27.362744 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" podStartSLOduration=2.272297749 podStartE2EDuration="8.362710798s" podCreationTimestamp="2025-12-01 18:45:19 +0000 UTC" firstStartedPulling="2025-12-01 18:45:20.487216905 +0000 UTC m=+934.508846164" lastFinishedPulling="2025-12-01 18:45:26.577629944 +0000 UTC m=+940.599259213" observedRunningTime="2025-12-01 18:45:27.353991562 +0000 UTC m=+941.375620831" watchObservedRunningTime="2025-12-01 18:45:27.362710798 +0000 UTC m=+941.384340077" Dec 01 18:45:28 crc kubenswrapper[4935]: I1201 18:45:28.260201 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:28 crc kubenswrapper[4935]: I1201 18:45:28.260690 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:28 crc kubenswrapper[4935]: I1201 18:45:28.273749 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-6bd679557b-vs7f7" Dec 01 18:45:28 crc kubenswrapper[4935]: I1201 18:45:28.275008 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-6bd679557b-fq7qm" Dec 01 18:45:39 crc kubenswrapper[4935]: I1201 18:45:39.500282 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-distributor-76cc67bf56-rvtlb" Dec 01 18:45:39 crc kubenswrapper[4935]: I1201 18:45:39.669095 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-querier-5895d59bb8-hkjzj" Dec 01 18:45:39 crc kubenswrapper[4935]: I1201 18:45:39.905008 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-jl8x2" Dec 01 18:45:40 crc kubenswrapper[4935]: I1201 18:45:40.655630 4935 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Dec 01 18:45:40 crc kubenswrapper[4935]: I1201 18:45:40.655709 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="cc008841-9147-4cd8-894b-e54127c2a4ab" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with 
statuscode: 503" Dec 01 18:45:40 crc kubenswrapper[4935]: I1201 18:45:40.945593 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-index-gateway-0" Dec 01 18:45:42 crc kubenswrapper[4935]: I1201 18:45:42.288747 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-compactor-0" Dec 01 18:45:50 crc kubenswrapper[4935]: I1201 18:45:50.655604 4935 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Dec 01 18:45:50 crc kubenswrapper[4935]: I1201 18:45:50.656427 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="cc008841-9147-4cd8-894b-e54127c2a4ab" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 01 18:45:54 crc kubenswrapper[4935]: I1201 18:45:54.345969 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:45:54 crc kubenswrapper[4935]: I1201 18:45:54.346451 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:46:00 crc kubenswrapper[4935]: I1201 18:46:00.653879 4935 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 01 18:46:00 crc kubenswrapper[4935]: I1201 18:46:00.654549 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="cc008841-9147-4cd8-894b-e54127c2a4ab" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 01 18:46:10 crc kubenswrapper[4935]: I1201 18:46:10.654987 4935 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 01 18:46:10 crc kubenswrapper[4935]: I1201 18:46:10.656096 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="cc008841-9147-4cd8-894b-e54127c2a4ab" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 01 18:46:20 crc kubenswrapper[4935]: I1201 18:46:20.652832 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-ingester-0" Dec 01 18:46:24 crc kubenswrapper[4935]: I1201 18:46:24.346715 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:46:24 crc kubenswrapper[4935]: I1201 18:46:24.347320 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:46:24 crc kubenswrapper[4935]: I1201 18:46:24.347413 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:46:24 crc kubenswrapper[4935]: I1201 18:46:24.348742 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"744bd448e7cc386bf9953720a69481d4b4d71c4c1477d84184ae1c1693198763"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 18:46:24 crc kubenswrapper[4935]: I1201 18:46:24.348888 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://744bd448e7cc386bf9953720a69481d4b4d71c4c1477d84184ae1c1693198763" gracePeriod=600 Dec 01 18:46:24 crc kubenswrapper[4935]: E1201 18:46:24.936134 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-conmon-744bd448e7cc386bf9953720a69481d4b4d71c4c1477d84184ae1c1693198763.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:46:25 crc kubenswrapper[4935]: I1201 18:46:25.025803 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="744bd448e7cc386bf9953720a69481d4b4d71c4c1477d84184ae1c1693198763" exitCode=0 Dec 01 18:46:25 crc kubenswrapper[4935]: I1201 18:46:25.025861 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"744bd448e7cc386bf9953720a69481d4b4d71c4c1477d84184ae1c1693198763"} Dec 01 18:46:25 crc kubenswrapper[4935]: I1201 18:46:25.025902 4935 scope.go:117] "RemoveContainer" containerID="f03453b0bfa80f20c0452bdf75f21c17981d4d486ecd0aa51da07478cdd727f3" Dec 01 18:46:26 crc kubenswrapper[4935]: I1201 18:46:26.038898 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"89d0c184ee1dbdba2189f946ff97ea233b33f6dde95b0c4dc3f41a9fad7d86ae"} Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.308670 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-zdz58"] Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.310743 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.317874 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-q8zfx" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.317899 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.318766 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.318798 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.318894 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.326529 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.328704 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-zdz58"] Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.375476 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghpfz\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-kube-api-access-ghpfz\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.375540 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-metrics\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.375689 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.375845 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config-openshift-service-cacrt\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.375896 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-syslog-receiver\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.375921 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-sa-token\") pod \"collector-zdz58\" (UID: 
\"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.375940 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-token\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.375964 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/d7523c49-811b-4ff8-b000-d1644f1e7683-datadir\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.376035 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-trusted-ca\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.376069 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-entrypoint\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.376087 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/d7523c49-811b-4ff8-b000-d1644f1e7683-tmp\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.475200 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-zdz58"] Dec 01 18:46:40 crc kubenswrapper[4935]: E1201 18:46:40.475745 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[collector-syslog-receiver collector-token config config-openshift-service-cacrt datadir entrypoint kube-api-access-ghpfz metrics sa-token tmp trusted-ca], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-logging/collector-zdz58" podUID="d7523c49-811b-4ff8-b000-d1644f1e7683" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.476944 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-syslog-receiver\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.476991 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-sa-token\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477018 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: 
\"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-token\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477041 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/d7523c49-811b-4ff8-b000-d1644f1e7683-datadir\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477063 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-trusted-ca\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477081 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-entrypoint\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477103 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/d7523c49-811b-4ff8-b000-d1644f1e7683-tmp\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477121 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghpfz\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-kube-api-access-ghpfz\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477138 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-metrics\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477203 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477254 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config-openshift-service-cacrt\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.477935 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config-openshift-service-cacrt\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc 
kubenswrapper[4935]: E1201 18:46:40.478022 4935 secret.go:188] Couldn't get secret openshift-logging/collector-syslog-receiver: secret "collector-syslog-receiver" not found Dec 01 18:46:40 crc kubenswrapper[4935]: E1201 18:46:40.478067 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-syslog-receiver podName:d7523c49-811b-4ff8-b000-d1644f1e7683 nodeName:}" failed. No retries permitted until 2025-12-01 18:46:40.978053509 +0000 UTC m=+1014.999682768 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "collector-syslog-receiver" (UniqueName: "kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-syslog-receiver") pod "collector-zdz58" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683") : secret "collector-syslog-receiver" not found Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.479192 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/d7523c49-811b-4ff8-b000-d1644f1e7683-datadir\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.479773 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-trusted-ca\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: E1201 18:46:40.480908 4935 secret.go:188] Couldn't get secret openshift-logging/collector-metrics: secret "collector-metrics" not found Dec 01 18:46:40 crc kubenswrapper[4935]: E1201 18:46:40.481025 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-metrics podName:d7523c49-811b-4ff8-b000-d1644f1e7683 nodeName:}" failed. No retries permitted until 2025-12-01 18:46:40.981008423 +0000 UTC m=+1015.002637692 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics" (UniqueName: "kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-metrics") pod "collector-zdz58" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683") : secret "collector-metrics" not found Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.481314 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-entrypoint\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.481402 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.486529 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/d7523c49-811b-4ff8-b000-d1644f1e7683-tmp\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.490531 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-token\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.501735 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-sa-token\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.505086 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghpfz\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-kube-api-access-ghpfz\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.986999 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-metrics\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.987252 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-syslog-receiver\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 18:46:40.991911 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-syslog-receiver\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:40 crc kubenswrapper[4935]: I1201 
18:46:40.992590 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-metrics\") pod \"collector-zdz58\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " pod="openshift-logging/collector-zdz58" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.171093 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-zdz58" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.184641 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-zdz58" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.189469 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/d7523c49-811b-4ff8-b000-d1644f1e7683-datadir\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.189733 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-sa-token\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.189856 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-entrypoint\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.189586 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d7523c49-811b-4ff8-b000-d1644f1e7683-datadir" (OuterVolumeSpecName: "datadir") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "datadir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.190319 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-entrypoint" (OuterVolumeSpecName: "entrypoint") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "entrypoint". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.190325 4935 reconciler_common.go:293] "Volume detached for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/d7523c49-811b-4ff8-b000-d1644f1e7683-datadir\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.194263 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-sa-token" (OuterVolumeSpecName: "sa-token") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.292038 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-syslog-receiver\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.292122 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-token\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.292485 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-metrics\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.292582 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghpfz\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-kube-api-access-ghpfz\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.292624 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-trusted-ca\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.292685 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config-openshift-service-cacrt\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.292720 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.292754 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/d7523c49-811b-4ff8-b000-d1644f1e7683-tmp\") pod \"d7523c49-811b-4ff8-b000-d1644f1e7683\" (UID: \"d7523c49-811b-4ff8-b000-d1644f1e7683\") " Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.293371 4935 reconciler_common.go:293] "Volume detached for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.293403 4935 reconciler_common.go:293] "Volume detached for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-entrypoint\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.294091 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config-openshift-service-cacrt" (OuterVolumeSpecName: "config-openshift-service-cacrt") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "config-openshift-service-cacrt". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.294317 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config" (OuterVolumeSpecName: "config") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.294946 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.297210 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-kube-api-access-ghpfz" (OuterVolumeSpecName: "kube-api-access-ghpfz") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "kube-api-access-ghpfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.296463 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-syslog-receiver" (OuterVolumeSpecName: "collector-syslog-receiver") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "collector-syslog-receiver". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.297688 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-metrics" (OuterVolumeSpecName: "metrics") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.298877 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7523c49-811b-4ff8-b000-d1644f1e7683-tmp" (OuterVolumeSpecName: "tmp") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "tmp". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.299180 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-token" (OuterVolumeSpecName: "collector-token") pod "d7523c49-811b-4ff8-b000-d1644f1e7683" (UID: "d7523c49-811b-4ff8-b000-d1644f1e7683"). InnerVolumeSpecName "collector-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.394995 4935 reconciler_common.go:293] "Volume detached for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.395050 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghpfz\" (UniqueName: \"kubernetes.io/projected/d7523c49-811b-4ff8-b000-d1644f1e7683-kube-api-access-ghpfz\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.395069 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.395087 4935 reconciler_common.go:293] "Volume detached for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config-openshift-service-cacrt\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.395106 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7523c49-811b-4ff8-b000-d1644f1e7683-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.395126 4935 reconciler_common.go:293] "Volume detached for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/d7523c49-811b-4ff8-b000-d1644f1e7683-tmp\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.395169 4935 reconciler_common.go:293] "Volume detached for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-syslog-receiver\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:41 crc kubenswrapper[4935]: I1201 18:46:41.395187 4935 reconciler_common.go:293] "Volume detached for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/d7523c49-811b-4ff8-b000-d1644f1e7683-collector-token\") on node \"crc\" DevicePath \"\"" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.180843 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-zdz58" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.254793 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-zdz58"] Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.300554 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-logging/collector-zdz58"] Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.316627 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-qlgrt"] Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.320877 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.323515 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.323648 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.326157 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.326311 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.326364 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-q8zfx" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.332719 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.334759 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-qlgrt"] Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415012 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-config\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415228 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-datadir\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415287 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-metrics\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415339 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klk46\" (UniqueName: \"kubernetes.io/projected/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-kube-api-access-klk46\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415404 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-entrypoint\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415496 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-sa-token\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" 
Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415537 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-tmp\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415609 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-collector-syslog-receiver\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415655 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-trusted-ca\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415699 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-collector-token\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.415758 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-config-openshift-service-cacrt\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.517715 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-collector-syslog-receiver\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.517809 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-trusted-ca\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.517860 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-collector-token\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.517904 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-config-openshift-service-cacrt\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 
18:46:42.517961 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-config\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.518039 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-datadir\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.518079 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-metrics\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.518123 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klk46\" (UniqueName: \"kubernetes.io/projected/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-kube-api-access-klk46\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.518224 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-entrypoint\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.518308 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-sa-token\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.518345 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-tmp\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.520799 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-trusted-ca\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.520798 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-config-openshift-service-cacrt\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.520879 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-entrypoint\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " 
pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.520959 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-datadir\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.522065 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-config\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.526591 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-tmp\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.527715 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-collector-token\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.527989 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7523c49-811b-4ff8-b000-d1644f1e7683" path="/var/lib/kubelet/pods/d7523c49-811b-4ff8-b000-d1644f1e7683/volumes" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.529256 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-metrics\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.531625 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-collector-syslog-receiver\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.536908 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-sa-token\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.541961 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klk46\" (UniqueName: \"kubernetes.io/projected/b40b5d13-a124-4ee9-a16e-cb21e2fcd047-kube-api-access-klk46\") pod \"collector-qlgrt\" (UID: \"b40b5d13-a124-4ee9-a16e-cb21e2fcd047\") " pod="openshift-logging/collector-qlgrt" Dec 01 18:46:42 crc kubenswrapper[4935]: I1201 18:46:42.641014 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-qlgrt" Dec 01 18:46:43 crc kubenswrapper[4935]: I1201 18:46:43.158523 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-qlgrt"] Dec 01 18:46:43 crc kubenswrapper[4935]: I1201 18:46:43.189105 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-qlgrt" event={"ID":"b40b5d13-a124-4ee9-a16e-cb21e2fcd047","Type":"ContainerStarted","Data":"a5012e1d58d93d7af4c6a4ce2ce700acede59dae0856015819ac1fb589d801bc"} Dec 01 18:46:51 crc kubenswrapper[4935]: I1201 18:46:51.255428 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-qlgrt" event={"ID":"b40b5d13-a124-4ee9-a16e-cb21e2fcd047","Type":"ContainerStarted","Data":"42f0e992762aaeae7cfb1770518b16e89be285b503293f84b5b4d8dff0462a86"} Dec 01 18:46:51 crc kubenswrapper[4935]: I1201 18:46:51.280913 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/collector-qlgrt" podStartSLOduration=2.186057247 podStartE2EDuration="9.280894621s" podCreationTimestamp="2025-12-01 18:46:42 +0000 UTC" firstStartedPulling="2025-12-01 18:46:43.165903137 +0000 UTC m=+1017.187532396" lastFinishedPulling="2025-12-01 18:46:50.260740491 +0000 UTC m=+1024.282369770" observedRunningTime="2025-12-01 18:46:51.274492709 +0000 UTC m=+1025.296121968" watchObservedRunningTime="2025-12-01 18:46:51.280894621 +0000 UTC m=+1025.302523880" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.091461 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj"] Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.093944 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.098302 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.102371 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj"] Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.165049 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.165126 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc284\" (UniqueName: \"kubernetes.io/projected/f58dd92e-7424-4a8b-a842-54d631dffd17-kube-api-access-hc284\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.165205 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.266717 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.266822 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.266869 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc284\" (UniqueName: \"kubernetes.io/projected/f58dd92e-7424-4a8b-a842-54d631dffd17-kube-api-access-hc284\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.267508 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.267617 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.285300 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc284\" (UniqueName: \"kubernetes.io/projected/f58dd92e-7424-4a8b-a842-54d631dffd17-kube-api-access-hc284\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.427923 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:21 crc kubenswrapper[4935]: I1201 18:47:21.880897 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj"] Dec 01 18:47:22 crc kubenswrapper[4935]: I1201 18:47:22.536383 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" event={"ID":"f58dd92e-7424-4a8b-a842-54d631dffd17","Type":"ContainerStarted","Data":"6a187be42fab8d60abb228aa2ca5c0517b5b6e844c8d4be8d6bc23d9548782da"} Dec 01 18:47:23 crc kubenswrapper[4935]: I1201 18:47:23.547504 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" event={"ID":"f58dd92e-7424-4a8b-a842-54d631dffd17","Type":"ContainerStarted","Data":"7b5b16c6238ae4dc68e7ef4381a4e6c15a589430b6dc9f328aba730e323e8469"} Dec 01 18:47:24 crc kubenswrapper[4935]: I1201 18:47:24.560430 4935 generic.go:334] "Generic (PLEG): container finished" podID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerID="7b5b16c6238ae4dc68e7ef4381a4e6c15a589430b6dc9f328aba730e323e8469" exitCode=0 Dec 01 18:47:24 crc kubenswrapper[4935]: I1201 18:47:24.560541 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" event={"ID":"f58dd92e-7424-4a8b-a842-54d631dffd17","Type":"ContainerDied","Data":"7b5b16c6238ae4dc68e7ef4381a4e6c15a589430b6dc9f328aba730e323e8469"} Dec 01 18:47:27 crc kubenswrapper[4935]: I1201 18:47:27.581615 4935 generic.go:334] "Generic (PLEG): container finished" podID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerID="77b072ac39430302db4376253cf77b37d7c166ed9355ce11f4b5e1f5f34f5f5e" exitCode=0 Dec 01 18:47:27 crc kubenswrapper[4935]: I1201 18:47:27.581701 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" 
event={"ID":"f58dd92e-7424-4a8b-a842-54d631dffd17","Type":"ContainerDied","Data":"77b072ac39430302db4376253cf77b37d7c166ed9355ce11f4b5e1f5f34f5f5e"} Dec 01 18:47:28 crc kubenswrapper[4935]: I1201 18:47:28.591788 4935 generic.go:334] "Generic (PLEG): container finished" podID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerID="df308f8f8c8eef43f6330665a26e75c16ab3e91adf67c623d1997c31dbb70d2d" exitCode=0 Dec 01 18:47:28 crc kubenswrapper[4935]: I1201 18:47:28.591851 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" event={"ID":"f58dd92e-7424-4a8b-a842-54d631dffd17","Type":"ContainerDied","Data":"df308f8f8c8eef43f6330665a26e75c16ab3e91adf67c623d1997c31dbb70d2d"} Dec 01 18:47:29 crc kubenswrapper[4935]: I1201 18:47:29.883832 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.018623 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-util\") pod \"f58dd92e-7424-4a8b-a842-54d631dffd17\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.018986 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hc284\" (UniqueName: \"kubernetes.io/projected/f58dd92e-7424-4a8b-a842-54d631dffd17-kube-api-access-hc284\") pod \"f58dd92e-7424-4a8b-a842-54d631dffd17\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.019093 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-bundle\") pod \"f58dd92e-7424-4a8b-a842-54d631dffd17\" (UID: \"f58dd92e-7424-4a8b-a842-54d631dffd17\") " Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.020374 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-bundle" (OuterVolumeSpecName: "bundle") pod "f58dd92e-7424-4a8b-a842-54d631dffd17" (UID: "f58dd92e-7424-4a8b-a842-54d631dffd17"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.026961 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f58dd92e-7424-4a8b-a842-54d631dffd17-kube-api-access-hc284" (OuterVolumeSpecName: "kube-api-access-hc284") pod "f58dd92e-7424-4a8b-a842-54d631dffd17" (UID: "f58dd92e-7424-4a8b-a842-54d631dffd17"). InnerVolumeSpecName "kube-api-access-hc284". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.028095 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-util" (OuterVolumeSpecName: "util") pod "f58dd92e-7424-4a8b-a842-54d631dffd17" (UID: "f58dd92e-7424-4a8b-a842-54d631dffd17"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.121446 4935 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-util\") on node \"crc\" DevicePath \"\"" Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.121523 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hc284\" (UniqueName: \"kubernetes.io/projected/f58dd92e-7424-4a8b-a842-54d631dffd17-kube-api-access-hc284\") on node \"crc\" DevicePath \"\"" Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.121546 4935 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f58dd92e-7424-4a8b-a842-54d631dffd17-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.607367 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" event={"ID":"f58dd92e-7424-4a8b-a842-54d631dffd17","Type":"ContainerDied","Data":"6a187be42fab8d60abb228aa2ca5c0517b5b6e844c8d4be8d6bc23d9548782da"} Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.607424 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a187be42fab8d60abb228aa2ca5c0517b5b6e844c8d4be8d6bc23d9548782da" Dec 01 18:47:30 crc kubenswrapper[4935]: I1201 18:47:30.607429 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.888489 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg"] Dec 01 18:47:32 crc kubenswrapper[4935]: E1201 18:47:32.889173 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerName="extract" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.889190 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerName="extract" Dec 01 18:47:32 crc kubenswrapper[4935]: E1201 18:47:32.889204 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerName="util" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.889211 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerName="util" Dec 01 18:47:32 crc kubenswrapper[4935]: E1201 18:47:32.889249 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerName="pull" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.889257 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerName="pull" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.889443 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f58dd92e-7424-4a8b-a842-54d631dffd17" containerName="extract" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.890127 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.891811 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-qtq97" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.892368 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg"] Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.893016 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.893175 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 01 18:47:32 crc kubenswrapper[4935]: I1201 18:47:32.968015 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpzqj\" (UniqueName: \"kubernetes.io/projected/2d0137b6-1254-4d31-a0fe-f8dc12b012f1-kube-api-access-kpzqj\") pod \"nmstate-operator-5b5b58f5c8-bcgrg\" (UID: \"2d0137b6-1254-4d31-a0fe-f8dc12b012f1\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg" Dec 01 18:47:33 crc kubenswrapper[4935]: I1201 18:47:33.070756 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpzqj\" (UniqueName: \"kubernetes.io/projected/2d0137b6-1254-4d31-a0fe-f8dc12b012f1-kube-api-access-kpzqj\") pod \"nmstate-operator-5b5b58f5c8-bcgrg\" (UID: \"2d0137b6-1254-4d31-a0fe-f8dc12b012f1\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg" Dec 01 18:47:33 crc kubenswrapper[4935]: I1201 18:47:33.103236 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpzqj\" (UniqueName: \"kubernetes.io/projected/2d0137b6-1254-4d31-a0fe-f8dc12b012f1-kube-api-access-kpzqj\") pod \"nmstate-operator-5b5b58f5c8-bcgrg\" (UID: \"2d0137b6-1254-4d31-a0fe-f8dc12b012f1\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg" Dec 01 18:47:33 crc kubenswrapper[4935]: I1201 18:47:33.213699 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg" Dec 01 18:47:33 crc kubenswrapper[4935]: I1201 18:47:33.528172 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg"] Dec 01 18:47:33 crc kubenswrapper[4935]: I1201 18:47:33.535771 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 18:47:33 crc kubenswrapper[4935]: I1201 18:47:33.628191 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg" event={"ID":"2d0137b6-1254-4d31-a0fe-f8dc12b012f1","Type":"ContainerStarted","Data":"68536b456abb5c2b25b1ce0b859e25ad041c3502d33785e746ab3d50952c1d85"} Dec 01 18:47:36 crc kubenswrapper[4935]: I1201 18:47:36.653645 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg" event={"ID":"2d0137b6-1254-4d31-a0fe-f8dc12b012f1","Type":"ContainerStarted","Data":"470ea7965eb6b83d838476b7bdcf071354d8ec7ea4c40c5db29204b2df171cb5"} Dec 01 18:47:36 crc kubenswrapper[4935]: I1201 18:47:36.676013 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-bcgrg" podStartSLOduration=1.930808801 podStartE2EDuration="4.675994998s" podCreationTimestamp="2025-12-01 18:47:32 +0000 UTC" firstStartedPulling="2025-12-01 18:47:33.535559413 +0000 UTC m=+1067.557188672" lastFinishedPulling="2025-12-01 18:47:36.28074561 +0000 UTC m=+1070.302374869" observedRunningTime="2025-12-01 18:47:36.669803123 +0000 UTC m=+1070.691432382" watchObservedRunningTime="2025-12-01 18:47:36.675994998 +0000 UTC m=+1070.697624257" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.705128 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx"] Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.707167 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.711525 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-rsqqc" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.712994 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj"] Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.714845 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.718363 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.724020 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx"] Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.739222 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-tjcvm"] Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.740554 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.746908 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj"] Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.836603 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rptwd\" (UniqueName: \"kubernetes.io/projected/3a58c39c-42c3-472c-8b8c-725b44b7ae0e-kube-api-access-rptwd\") pod \"nmstate-webhook-5f6d4c5ccb-n6zfj\" (UID: \"3a58c39c-42c3-472c-8b8c-725b44b7ae0e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.836701 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3a58c39c-42c3-472c-8b8c-725b44b7ae0e-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-n6zfj\" (UID: \"3a58c39c-42c3-472c-8b8c-725b44b7ae0e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.836730 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/e2829c29-7d31-4124-87aa-e2eff8f2653c-nmstate-lock\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.836795 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmbl6\" (UniqueName: \"kubernetes.io/projected/e2829c29-7d31-4124-87aa-e2eff8f2653c-kube-api-access-zmbl6\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.836855 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsz8p\" (UniqueName: \"kubernetes.io/projected/d347948e-508e-4aeb-b082-c2d0f48ebace-kube-api-access-nsz8p\") pod \"nmstate-metrics-7f946cbc9-2vqsx\" (UID: \"d347948e-508e-4aeb-b082-c2d0f48ebace\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.836882 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/e2829c29-7d31-4124-87aa-e2eff8f2653c-ovs-socket\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.836912 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/e2829c29-7d31-4124-87aa-e2eff8f2653c-dbus-socket\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.906202 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl"] Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.907108 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.909674 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.910115 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.910172 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-8v6c2" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.915159 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl"] Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.938898 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3a58c39c-42c3-472c-8b8c-725b44b7ae0e-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-n6zfj\" (UID: \"3a58c39c-42c3-472c-8b8c-725b44b7ae0e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.938933 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/e2829c29-7d31-4124-87aa-e2eff8f2653c-nmstate-lock\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.938963 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.938989 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.939018 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmbl6\" (UniqueName: \"kubernetes.io/projected/e2829c29-7d31-4124-87aa-e2eff8f2653c-kube-api-access-zmbl6\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.939050 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsfhn\" (UniqueName: \"kubernetes.io/projected/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-kube-api-access-dsfhn\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.939079 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsz8p\" (UniqueName: 
\"kubernetes.io/projected/d347948e-508e-4aeb-b082-c2d0f48ebace-kube-api-access-nsz8p\") pod \"nmstate-metrics-7f946cbc9-2vqsx\" (UID: \"d347948e-508e-4aeb-b082-c2d0f48ebace\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.939093 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/e2829c29-7d31-4124-87aa-e2eff8f2653c-ovs-socket\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.939109 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/e2829c29-7d31-4124-87aa-e2eff8f2653c-dbus-socket\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.939188 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rptwd\" (UniqueName: \"kubernetes.io/projected/3a58c39c-42c3-472c-8b8c-725b44b7ae0e-kube-api-access-rptwd\") pod \"nmstate-webhook-5f6d4c5ccb-n6zfj\" (UID: \"3a58c39c-42c3-472c-8b8c-725b44b7ae0e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:42 crc kubenswrapper[4935]: E1201 18:47:42.939506 4935 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 01 18:47:42 crc kubenswrapper[4935]: E1201 18:47:42.939546 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3a58c39c-42c3-472c-8b8c-725b44b7ae0e-tls-key-pair podName:3a58c39c-42c3-472c-8b8c-725b44b7ae0e nodeName:}" failed. No retries permitted until 2025-12-01 18:47:43.439530768 +0000 UTC m=+1077.461160027 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/3a58c39c-42c3-472c-8b8c-725b44b7ae0e-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-n6zfj" (UID: "3a58c39c-42c3-472c-8b8c-725b44b7ae0e") : secret "openshift-nmstate-webhook" not found Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.939572 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/e2829c29-7d31-4124-87aa-e2eff8f2653c-nmstate-lock\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.939817 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/e2829c29-7d31-4124-87aa-e2eff8f2653c-ovs-socket\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.940037 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/e2829c29-7d31-4124-87aa-e2eff8f2653c-dbus-socket\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.980267 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmbl6\" (UniqueName: \"kubernetes.io/projected/e2829c29-7d31-4124-87aa-e2eff8f2653c-kube-api-access-zmbl6\") pod \"nmstate-handler-tjcvm\" (UID: \"e2829c29-7d31-4124-87aa-e2eff8f2653c\") " pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.982243 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsz8p\" (UniqueName: \"kubernetes.io/projected/d347948e-508e-4aeb-b082-c2d0f48ebace-kube-api-access-nsz8p\") pod \"nmstate-metrics-7f946cbc9-2vqsx\" (UID: \"d347948e-508e-4aeb-b082-c2d0f48ebace\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx" Dec 01 18:47:42 crc kubenswrapper[4935]: I1201 18:47:42.997991 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rptwd\" (UniqueName: \"kubernetes.io/projected/3a58c39c-42c3-472c-8b8c-725b44b7ae0e-kube-api-access-rptwd\") pod \"nmstate-webhook-5f6d4c5ccb-n6zfj\" (UID: \"3a58c39c-42c3-472c-8b8c-725b44b7ae0e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.024688 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.040916 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsfhn\" (UniqueName: \"kubernetes.io/projected/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-kube-api-access-dsfhn\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.041092 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.041128 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:43 crc kubenswrapper[4935]: E1201 18:47:43.041638 4935 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 01 18:47:43 crc kubenswrapper[4935]: E1201 18:47:43.041717 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-plugin-serving-cert podName:7b73393b-b0a5-4ec7-8854-b7a0d5cbe268 nodeName:}" failed. No retries permitted until 2025-12-01 18:47:43.541697727 +0000 UTC m=+1077.563326986 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-7ncpl" (UID: "7b73393b-b0a5-4ec7-8854-b7a0d5cbe268") : secret "plugin-serving-cert" not found Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.042205 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.083817 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.098094 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsfhn\" (UniqueName: \"kubernetes.io/projected/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-kube-api-access-dsfhn\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.263367 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-66ff47d4bb-794fx"] Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.264647 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.293583 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-66ff47d4bb-794fx"] Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.353736 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-serving-cert\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.354119 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n2nd\" (UniqueName: \"kubernetes.io/projected/f74d6018-3e94-4935-8e2b-de23ecdadecc-kube-api-access-8n2nd\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.354194 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-oauth-serving-cert\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.354236 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-service-ca\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.354277 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-oauth-config\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.354314 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-trusted-ca-bundle\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.354353 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-config\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.455259 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-serving-cert\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc 
kubenswrapper[4935]: I1201 18:47:43.455299 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n2nd\" (UniqueName: \"kubernetes.io/projected/f74d6018-3e94-4935-8e2b-de23ecdadecc-kube-api-access-8n2nd\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.455339 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-oauth-serving-cert\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.455370 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-service-ca\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.455401 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-oauth-config\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.455428 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-trusted-ca-bundle\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.455450 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-config\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.455515 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3a58c39c-42c3-472c-8b8c-725b44b7ae0e-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-n6zfj\" (UID: \"3a58c39c-42c3-472c-8b8c-725b44b7ae0e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.456709 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-oauth-serving-cert\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.457425 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-trusted-ca-bundle\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc 
kubenswrapper[4935]: I1201 18:47:43.458086 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-config\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.458689 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-service-ca\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.459744 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3a58c39c-42c3-472c-8b8c-725b44b7ae0e-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-n6zfj\" (UID: \"3a58c39c-42c3-472c-8b8c-725b44b7ae0e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.459903 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-serving-cert\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.460461 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-oauth-config\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.482197 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n2nd\" (UniqueName: \"kubernetes.io/projected/f74d6018-3e94-4935-8e2b-de23ecdadecc-kube-api-access-8n2nd\") pod \"console-66ff47d4bb-794fx\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.556756 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.559956 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7b73393b-b0a5-4ec7-8854-b7a0d5cbe268-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-7ncpl\" (UID: \"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.630293 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.646705 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.664698 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx"] Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.740019 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx" event={"ID":"d347948e-508e-4aeb-b082-c2d0f48ebace","Type":"ContainerStarted","Data":"674a484424a175c50d20e5c861aa8903a2264485aec114a58dd16c8dbb19d14a"} Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.741500 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-tjcvm" event={"ID":"e2829c29-7d31-4124-87aa-e2eff8f2653c","Type":"ContainerStarted","Data":"3c040dc4a028b1d4dc65d4c188ac4683f8935026ac11abe47946e2394820741f"} Dec 01 18:47:43 crc kubenswrapper[4935]: I1201 18:47:43.845796 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" Dec 01 18:47:44 crc kubenswrapper[4935]: I1201 18:47:44.013530 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj"] Dec 01 18:47:44 crc kubenswrapper[4935]: I1201 18:47:44.087778 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-66ff47d4bb-794fx"] Dec 01 18:47:44 crc kubenswrapper[4935]: I1201 18:47:44.361758 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl"] Dec 01 18:47:44 crc kubenswrapper[4935]: W1201 18:47:44.369517 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b73393b_b0a5_4ec7_8854_b7a0d5cbe268.slice/crio-2d1f29573cffd5fe1d7756c9fa14a548e6963234b63fafda7ccf331a915a8fb2 WatchSource:0}: Error finding container 2d1f29573cffd5fe1d7756c9fa14a548e6963234b63fafda7ccf331a915a8fb2: Status 404 returned error can't find the container with id 2d1f29573cffd5fe1d7756c9fa14a548e6963234b63fafda7ccf331a915a8fb2 Dec 01 18:47:44 crc kubenswrapper[4935]: I1201 18:47:44.750678 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" event={"ID":"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268","Type":"ContainerStarted","Data":"2d1f29573cffd5fe1d7756c9fa14a548e6963234b63fafda7ccf331a915a8fb2"} Dec 01 18:47:44 crc kubenswrapper[4935]: I1201 18:47:44.752334 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-66ff47d4bb-794fx" event={"ID":"f74d6018-3e94-4935-8e2b-de23ecdadecc","Type":"ContainerStarted","Data":"2e075dd5f9a4b265cc14bd97b8bf26aa140bc863ebf805056878e0968dbfd8d3"} Dec 01 18:47:44 crc kubenswrapper[4935]: I1201 18:47:44.752379 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-66ff47d4bb-794fx" event={"ID":"f74d6018-3e94-4935-8e2b-de23ecdadecc","Type":"ContainerStarted","Data":"19a146740b92e48ab3882afb3e6ce01e7e0128658178f04438a2def632e6c522"} Dec 01 18:47:44 crc kubenswrapper[4935]: I1201 18:47:44.753929 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" event={"ID":"3a58c39c-42c3-472c-8b8c-725b44b7ae0e","Type":"ContainerStarted","Data":"db9dc15c44ce722773ab7ffdcb6d830e4618513ee6b37c43ea5ce82fd15e55c1"} Dec 01 18:47:44 crc kubenswrapper[4935]: I1201 18:47:44.771648 4935 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-66ff47d4bb-794fx" podStartSLOduration=1.771630512 podStartE2EDuration="1.771630512s" podCreationTimestamp="2025-12-01 18:47:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:47:44.769930508 +0000 UTC m=+1078.791559787" watchObservedRunningTime="2025-12-01 18:47:44.771630512 +0000 UTC m=+1078.793259771" Dec 01 18:47:48 crc kubenswrapper[4935]: I1201 18:47:48.788073 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" event={"ID":"7b73393b-b0a5-4ec7-8854-b7a0d5cbe268","Type":"ContainerStarted","Data":"6a508486ffa9d9b1a4d984d01df0fcd04d3f931480c6112340b6bdc25e47607b"} Dec 01 18:47:48 crc kubenswrapper[4935]: I1201 18:47:48.789693 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx" event={"ID":"d347948e-508e-4aeb-b082-c2d0f48ebace","Type":"ContainerStarted","Data":"801b551dcc9f95578a9d90959d2b5ee0d4c56b1d99135e5256f236199c2bd2bf"} Dec 01 18:47:48 crc kubenswrapper[4935]: I1201 18:47:48.791859 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-tjcvm" event={"ID":"e2829c29-7d31-4124-87aa-e2eff8f2653c","Type":"ContainerStarted","Data":"6c21e3a44ed66dec829e2ed8ab37ed531859c116d814359f0ff7518fe66780ab"} Dec 01 18:47:48 crc kubenswrapper[4935]: I1201 18:47:48.792159 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:48 crc kubenswrapper[4935]: I1201 18:47:48.795962 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" event={"ID":"3a58c39c-42c3-472c-8b8c-725b44b7ae0e","Type":"ContainerStarted","Data":"b13224faffbe66b271c570fd9c6eda715d098278783f2fcb82800937382996ed"} Dec 01 18:47:48 crc kubenswrapper[4935]: I1201 18:47:48.796192 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:47:48 crc kubenswrapper[4935]: I1201 18:47:48.810213 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-7ncpl" podStartSLOduration=3.758810823 podStartE2EDuration="6.810195066s" podCreationTimestamp="2025-12-01 18:47:42 +0000 UTC" firstStartedPulling="2025-12-01 18:47:44.372071026 +0000 UTC m=+1078.393700285" lastFinishedPulling="2025-12-01 18:47:47.423455229 +0000 UTC m=+1081.445084528" observedRunningTime="2025-12-01 18:47:48.80749457 +0000 UTC m=+1082.829123829" watchObservedRunningTime="2025-12-01 18:47:48.810195066 +0000 UTC m=+1082.831824325" Dec 01 18:47:48 crc kubenswrapper[4935]: I1201 18:47:48.836551 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-tjcvm" podStartSLOduration=2.591654438 podStartE2EDuration="6.836529281s" podCreationTimestamp="2025-12-01 18:47:42 +0000 UTC" firstStartedPulling="2025-12-01 18:47:43.178252515 +0000 UTC m=+1077.199881774" lastFinishedPulling="2025-12-01 18:47:47.423127348 +0000 UTC m=+1081.444756617" observedRunningTime="2025-12-01 18:47:48.834448364 +0000 UTC m=+1082.856077623" watchObservedRunningTime="2025-12-01 18:47:48.836529281 +0000 UTC m=+1082.858158540" Dec 01 18:47:48 crc kubenswrapper[4935]: I1201 18:47:48.867795 4935 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" podStartSLOduration=3.461358305 podStartE2EDuration="6.867777611s" podCreationTimestamp="2025-12-01 18:47:42 +0000 UTC" firstStartedPulling="2025-12-01 18:47:44.022977121 +0000 UTC m=+1078.044606380" lastFinishedPulling="2025-12-01 18:47:47.429396387 +0000 UTC m=+1081.451025686" observedRunningTime="2025-12-01 18:47:48.859928222 +0000 UTC m=+1082.881557491" watchObservedRunningTime="2025-12-01 18:47:48.867777611 +0000 UTC m=+1082.889406860" Dec 01 18:47:53 crc kubenswrapper[4935]: I1201 18:47:53.110849 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-tjcvm" Dec 01 18:47:53 crc kubenswrapper[4935]: I1201 18:47:53.631309 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:53 crc kubenswrapper[4935]: I1201 18:47:53.631762 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:53 crc kubenswrapper[4935]: I1201 18:47:53.639029 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:53 crc kubenswrapper[4935]: I1201 18:47:53.852802 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:47:53 crc kubenswrapper[4935]: I1201 18:47:53.922883 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-7656789b46-jfxcx"] Dec 01 18:47:55 crc kubenswrapper[4935]: I1201 18:47:55.868036 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx" event={"ID":"d347948e-508e-4aeb-b082-c2d0f48ebace","Type":"ContainerStarted","Data":"f12c7025940b7f25f25d5a430e029c8cc1cb2026776ed79cbed1f959ec80d146"} Dec 01 18:47:55 crc kubenswrapper[4935]: I1201 18:47:55.897978 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2vqsx" podStartSLOduration=2.292415732 podStartE2EDuration="13.897952653s" podCreationTimestamp="2025-12-01 18:47:42 +0000 UTC" firstStartedPulling="2025-12-01 18:47:43.673099801 +0000 UTC m=+1077.694729060" lastFinishedPulling="2025-12-01 18:47:55.278636682 +0000 UTC m=+1089.300265981" observedRunningTime="2025-12-01 18:47:55.897603511 +0000 UTC m=+1089.919232830" watchObservedRunningTime="2025-12-01 18:47:55.897952653 +0000 UTC m=+1089.919581952" Dec 01 18:48:03 crc kubenswrapper[4935]: I1201 18:48:03.656356 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-n6zfj" Dec 01 18:48:18 crc kubenswrapper[4935]: I1201 18:48:18.977958 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-7656789b46-jfxcx" podUID="6f2c0d54-c995-41fe-8ffb-36376e7aed2a" containerName="console" containerID="cri-o://5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91" gracePeriod=15 Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.458338 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-7656789b46-jfxcx_6f2c0d54-c995-41fe-8ffb-36376e7aed2a/console/0.log" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.458605 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.502942 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-config\") pod \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.503055 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-service-ca\") pod \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.503119 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-serving-cert\") pod \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.503263 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-oauth-config\") pod \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.503297 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-trusted-ca-bundle\") pod \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.503341 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lshcp\" (UniqueName: \"kubernetes.io/projected/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-kube-api-access-lshcp\") pod \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.503377 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-oauth-serving-cert\") pod \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\" (UID: \"6f2c0d54-c995-41fe-8ffb-36376e7aed2a\") " Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.504015 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-config" (OuterVolumeSpecName: "console-config") pod "6f2c0d54-c995-41fe-8ffb-36376e7aed2a" (UID: "6f2c0d54-c995-41fe-8ffb-36376e7aed2a"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.504175 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6f2c0d54-c995-41fe-8ffb-36376e7aed2a" (UID: "6f2c0d54-c995-41fe-8ffb-36376e7aed2a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.504545 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "6f2c0d54-c995-41fe-8ffb-36376e7aed2a" (UID: "6f2c0d54-c995-41fe-8ffb-36376e7aed2a"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.504597 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-service-ca" (OuterVolumeSpecName: "service-ca") pod "6f2c0d54-c995-41fe-8ffb-36376e7aed2a" (UID: "6f2c0d54-c995-41fe-8ffb-36376e7aed2a"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.511375 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "6f2c0d54-c995-41fe-8ffb-36376e7aed2a" (UID: "6f2c0d54-c995-41fe-8ffb-36376e7aed2a"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.513504 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "6f2c0d54-c995-41fe-8ffb-36376e7aed2a" (UID: "6f2c0d54-c995-41fe-8ffb-36376e7aed2a"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.518542 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-kube-api-access-lshcp" (OuterVolumeSpecName: "kube-api-access-lshcp") pod "6f2c0d54-c995-41fe-8ffb-36376e7aed2a" (UID: "6f2c0d54-c995-41fe-8ffb-36376e7aed2a"). InnerVolumeSpecName "kube-api-access-lshcp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.606123 4935 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.606192 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.606206 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lshcp\" (UniqueName: \"kubernetes.io/projected/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-kube-api-access-lshcp\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.606219 4935 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.606230 4935 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.606241 4935 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:19 crc kubenswrapper[4935]: I1201 18:48:19.606252 4935 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f2c0d54-c995-41fe-8ffb-36376e7aed2a-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.111266 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-7656789b46-jfxcx_6f2c0d54-c995-41fe-8ffb-36376e7aed2a/console/0.log" Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.111581 4935 generic.go:334] "Generic (PLEG): container finished" podID="6f2c0d54-c995-41fe-8ffb-36376e7aed2a" containerID="5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91" exitCode=2 Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.111617 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7656789b46-jfxcx" event={"ID":"6f2c0d54-c995-41fe-8ffb-36376e7aed2a","Type":"ContainerDied","Data":"5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91"} Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.111639 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7656789b46-jfxcx" Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.111655 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7656789b46-jfxcx" event={"ID":"6f2c0d54-c995-41fe-8ffb-36376e7aed2a","Type":"ContainerDied","Data":"af1f926af91b610a67517d1fd1022e06e4440ff63007fc9ee8e4e1d30e732494"} Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.111683 4935 scope.go:117] "RemoveContainer" containerID="5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91" Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.132867 4935 scope.go:117] "RemoveContainer" containerID="5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91" Dec 01 18:48:20 crc kubenswrapper[4935]: E1201 18:48:20.136201 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91\": container with ID starting with 5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91 not found: ID does not exist" containerID="5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91" Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.136244 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91"} err="failed to get container status \"5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91\": rpc error: code = NotFound desc = could not find container \"5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91\": container with ID starting with 5c1df9854d10f362a38f17eacabce2a62ae7b4ed636e71a988285403bf5c5e91 not found: ID does not exist" Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.144942 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-7656789b46-jfxcx"] Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.149759 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-7656789b46-jfxcx"] Dec 01 18:48:20 crc kubenswrapper[4935]: I1201 18:48:20.522747 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f2c0d54-c995-41fe-8ffb-36376e7aed2a" path="/var/lib/kubelet/pods/6f2c0d54-c995-41fe-8ffb-36376e7aed2a/volumes" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.731692 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9"] Dec 01 18:48:24 crc kubenswrapper[4935]: E1201 18:48:24.734073 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f2c0d54-c995-41fe-8ffb-36376e7aed2a" containerName="console" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.734099 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f2c0d54-c995-41fe-8ffb-36376e7aed2a" containerName="console" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.734291 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f2c0d54-c995-41fe-8ffb-36376e7aed2a" containerName="console" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.736781 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.740494 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.747469 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9"] Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.793493 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.793639 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.793762 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt2cl\" (UniqueName: \"kubernetes.io/projected/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-kube-api-access-lt2cl\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.895214 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt2cl\" (UniqueName: \"kubernetes.io/projected/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-kube-api-access-lt2cl\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.895340 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.895376 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.896117 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.896132 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:24 crc kubenswrapper[4935]: I1201 18:48:24.916269 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt2cl\" (UniqueName: \"kubernetes.io/projected/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-kube-api-access-lt2cl\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:25 crc kubenswrapper[4935]: I1201 18:48:25.063222 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:25 crc kubenswrapper[4935]: I1201 18:48:25.514491 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9"] Dec 01 18:48:26 crc kubenswrapper[4935]: I1201 18:48:26.174393 4935 generic.go:334] "Generic (PLEG): container finished" podID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerID="1d21cc3578c21f01566270f941b3ca7e033265b51a16dde32ffbd5d3dc9db1ec" exitCode=0 Dec 01 18:48:26 crc kubenswrapper[4935]: I1201 18:48:26.174599 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" event={"ID":"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b","Type":"ContainerDied","Data":"1d21cc3578c21f01566270f941b3ca7e033265b51a16dde32ffbd5d3dc9db1ec"} Dec 01 18:48:26 crc kubenswrapper[4935]: I1201 18:48:26.174746 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" event={"ID":"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b","Type":"ContainerStarted","Data":"4c9a03b57b437edd6c9d2b076785bfbbf149fc155e6b8488d5c4e9f041dfe232"} Dec 01 18:48:28 crc kubenswrapper[4935]: I1201 18:48:28.190461 4935 generic.go:334] "Generic (PLEG): container finished" podID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerID="aabea7e75d9a49a002955a00fb440f2350d1afdfcf6a15ca2787eb0706674bad" exitCode=0 Dec 01 18:48:28 crc kubenswrapper[4935]: I1201 18:48:28.190566 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" event={"ID":"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b","Type":"ContainerDied","Data":"aabea7e75d9a49a002955a00fb440f2350d1afdfcf6a15ca2787eb0706674bad"} Dec 01 18:48:29 crc kubenswrapper[4935]: I1201 18:48:29.202233 4935 generic.go:334] "Generic (PLEG): container finished" podID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerID="45152126757ca7e8981d0c8b793120a022fbee4af27875dccc9b72beeee52c50" exitCode=0 Dec 01 18:48:29 crc kubenswrapper[4935]: I1201 
18:48:29.202280 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" event={"ID":"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b","Type":"ContainerDied","Data":"45152126757ca7e8981d0c8b793120a022fbee4af27875dccc9b72beeee52c50"} Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.576264 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.699772 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-util\") pod \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.699889 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt2cl\" (UniqueName: \"kubernetes.io/projected/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-kube-api-access-lt2cl\") pod \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.699921 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-bundle\") pod \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\" (UID: \"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b\") " Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.700814 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-bundle" (OuterVolumeSpecName: "bundle") pod "d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" (UID: "d856ea4e-cbbe-41e2-9e51-15a2efa53b2b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.701872 4935 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.706993 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-kube-api-access-lt2cl" (OuterVolumeSpecName: "kube-api-access-lt2cl") pod "d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" (UID: "d856ea4e-cbbe-41e2-9e51-15a2efa53b2b"). InnerVolumeSpecName "kube-api-access-lt2cl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.713638 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-util" (OuterVolumeSpecName: "util") pod "d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" (UID: "d856ea4e-cbbe-41e2-9e51-15a2efa53b2b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.803469 4935 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-util\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:30 crc kubenswrapper[4935]: I1201 18:48:30.803507 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt2cl\" (UniqueName: \"kubernetes.io/projected/d856ea4e-cbbe-41e2-9e51-15a2efa53b2b-kube-api-access-lt2cl\") on node \"crc\" DevicePath \"\"" Dec 01 18:48:31 crc kubenswrapper[4935]: I1201 18:48:31.222795 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" event={"ID":"d856ea4e-cbbe-41e2-9e51-15a2efa53b2b","Type":"ContainerDied","Data":"4c9a03b57b437edd6c9d2b076785bfbbf149fc155e6b8488d5c4e9f041dfe232"} Dec 01 18:48:31 crc kubenswrapper[4935]: I1201 18:48:31.222844 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c9a03b57b437edd6c9d2b076785bfbbf149fc155e6b8488d5c4e9f041dfe232" Dec 01 18:48:31 crc kubenswrapper[4935]: I1201 18:48:31.222891 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.007800 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-788fff765c-47cxx"] Dec 01 18:48:40 crc kubenswrapper[4935]: E1201 18:48:40.008599 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerName="pull" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.008612 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerName="pull" Dec 01 18:48:40 crc kubenswrapper[4935]: E1201 18:48:40.008621 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerName="extract" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.008626 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerName="extract" Dec 01 18:48:40 crc kubenswrapper[4935]: E1201 18:48:40.008639 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerName="util" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.008646 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerName="util" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.008788 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d856ea4e-cbbe-41e2-9e51-15a2efa53b2b" containerName="extract" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.009324 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.015813 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.015836 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.015944 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.016105 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-9wsns" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.016648 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.030730 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-788fff765c-47cxx"] Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.160904 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e01c9df-7ad5-47f7-82b8-3886841341a8-apiservice-cert\") pod \"metallb-operator-controller-manager-788fff765c-47cxx\" (UID: \"6e01c9df-7ad5-47f7-82b8-3886841341a8\") " pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.160978 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e01c9df-7ad5-47f7-82b8-3886841341a8-webhook-cert\") pod \"metallb-operator-controller-manager-788fff765c-47cxx\" (UID: \"6e01c9df-7ad5-47f7-82b8-3886841341a8\") " pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.161044 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45g4j\" (UniqueName: \"kubernetes.io/projected/6e01c9df-7ad5-47f7-82b8-3886841341a8-kube-api-access-45g4j\") pod \"metallb-operator-controller-manager-788fff765c-47cxx\" (UID: \"6e01c9df-7ad5-47f7-82b8-3886841341a8\") " pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.262067 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45g4j\" (UniqueName: \"kubernetes.io/projected/6e01c9df-7ad5-47f7-82b8-3886841341a8-kube-api-access-45g4j\") pod \"metallb-operator-controller-manager-788fff765c-47cxx\" (UID: \"6e01c9df-7ad5-47f7-82b8-3886841341a8\") " pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.262223 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e01c9df-7ad5-47f7-82b8-3886841341a8-apiservice-cert\") pod \"metallb-operator-controller-manager-788fff765c-47cxx\" (UID: \"6e01c9df-7ad5-47f7-82b8-3886841341a8\") " pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.262254 
4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e01c9df-7ad5-47f7-82b8-3886841341a8-webhook-cert\") pod \"metallb-operator-controller-manager-788fff765c-47cxx\" (UID: \"6e01c9df-7ad5-47f7-82b8-3886841341a8\") " pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.268132 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e01c9df-7ad5-47f7-82b8-3886841341a8-webhook-cert\") pod \"metallb-operator-controller-manager-788fff765c-47cxx\" (UID: \"6e01c9df-7ad5-47f7-82b8-3886841341a8\") " pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.268313 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e01c9df-7ad5-47f7-82b8-3886841341a8-apiservice-cert\") pod \"metallb-operator-controller-manager-788fff765c-47cxx\" (UID: \"6e01c9df-7ad5-47f7-82b8-3886841341a8\") " pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.290039 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45g4j\" (UniqueName: \"kubernetes.io/projected/6e01c9df-7ad5-47f7-82b8-3886841341a8-kube-api-access-45g4j\") pod \"metallb-operator-controller-manager-788fff765c-47cxx\" (UID: \"6e01c9df-7ad5-47f7-82b8-3886841341a8\") " pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.329289 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.391686 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6"] Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.404647 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.408674 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.408974 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.409245 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-7l86r" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.432031 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6"] Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.470644 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7195d61c-817c-43c7-8cc4-09ac712ba59f-webhook-cert\") pod \"metallb-operator-webhook-server-7687bf949b-vrmk6\" (UID: \"7195d61c-817c-43c7-8cc4-09ac712ba59f\") " pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.470945 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7195d61c-817c-43c7-8cc4-09ac712ba59f-apiservice-cert\") pod \"metallb-operator-webhook-server-7687bf949b-vrmk6\" (UID: \"7195d61c-817c-43c7-8cc4-09ac712ba59f\") " pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.470983 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5b8m\" (UniqueName: \"kubernetes.io/projected/7195d61c-817c-43c7-8cc4-09ac712ba59f-kube-api-access-r5b8m\") pod \"metallb-operator-webhook-server-7687bf949b-vrmk6\" (UID: \"7195d61c-817c-43c7-8cc4-09ac712ba59f\") " pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.580043 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7195d61c-817c-43c7-8cc4-09ac712ba59f-webhook-cert\") pod \"metallb-operator-webhook-server-7687bf949b-vrmk6\" (UID: \"7195d61c-817c-43c7-8cc4-09ac712ba59f\") " pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.580160 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7195d61c-817c-43c7-8cc4-09ac712ba59f-apiservice-cert\") pod \"metallb-operator-webhook-server-7687bf949b-vrmk6\" (UID: \"7195d61c-817c-43c7-8cc4-09ac712ba59f\") " pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.580187 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5b8m\" (UniqueName: \"kubernetes.io/projected/7195d61c-817c-43c7-8cc4-09ac712ba59f-kube-api-access-r5b8m\") pod \"metallb-operator-webhook-server-7687bf949b-vrmk6\" (UID: \"7195d61c-817c-43c7-8cc4-09ac712ba59f\") " pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 
18:48:40.601109 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7195d61c-817c-43c7-8cc4-09ac712ba59f-apiservice-cert\") pod \"metallb-operator-webhook-server-7687bf949b-vrmk6\" (UID: \"7195d61c-817c-43c7-8cc4-09ac712ba59f\") " pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.601933 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7195d61c-817c-43c7-8cc4-09ac712ba59f-webhook-cert\") pod \"metallb-operator-webhook-server-7687bf949b-vrmk6\" (UID: \"7195d61c-817c-43c7-8cc4-09ac712ba59f\") " pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.635918 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5b8m\" (UniqueName: \"kubernetes.io/projected/7195d61c-817c-43c7-8cc4-09ac712ba59f-kube-api-access-r5b8m\") pod \"metallb-operator-webhook-server-7687bf949b-vrmk6\" (UID: \"7195d61c-817c-43c7-8cc4-09ac712ba59f\") " pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.757202 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:40 crc kubenswrapper[4935]: I1201 18:48:40.939189 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-788fff765c-47cxx"] Dec 01 18:48:41 crc kubenswrapper[4935]: I1201 18:48:41.170916 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6"] Dec 01 18:48:41 crc kubenswrapper[4935]: W1201 18:48:41.174385 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7195d61c_817c_43c7_8cc4_09ac712ba59f.slice/crio-b32289faf27c77fdb1651aeb85765714078dfa5e05fbaa8883b62e5c60c32767 WatchSource:0}: Error finding container b32289faf27c77fdb1651aeb85765714078dfa5e05fbaa8883b62e5c60c32767: Status 404 returned error can't find the container with id b32289faf27c77fdb1651aeb85765714078dfa5e05fbaa8883b62e5c60c32767 Dec 01 18:48:41 crc kubenswrapper[4935]: I1201 18:48:41.293274 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" event={"ID":"6e01c9df-7ad5-47f7-82b8-3886841341a8","Type":"ContainerStarted","Data":"3baff1db6232e7bea4b690461e8bd49905af7c73510e3fe8f51e50852e636b25"} Dec 01 18:48:41 crc kubenswrapper[4935]: I1201 18:48:41.294534 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" event={"ID":"7195d61c-817c-43c7-8cc4-09ac712ba59f","Type":"ContainerStarted","Data":"b32289faf27c77fdb1651aeb85765714078dfa5e05fbaa8883b62e5c60c32767"} Dec 01 18:48:47 crc kubenswrapper[4935]: I1201 18:48:47.357028 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" event={"ID":"7195d61c-817c-43c7-8cc4-09ac712ba59f","Type":"ContainerStarted","Data":"71f2f03daccf480706ae1d23d5a4be91250ecbf409de74b337f5393537855d9f"} Dec 01 18:48:47 crc kubenswrapper[4935]: I1201 18:48:47.357636 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:48:47 crc kubenswrapper[4935]: I1201 18:48:47.361963 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" event={"ID":"6e01c9df-7ad5-47f7-82b8-3886841341a8","Type":"ContainerStarted","Data":"c5caa467c8a947f9a08ce11716d4fc92eefd8068e31e06c70171ab16ab54a453"} Dec 01 18:48:47 crc kubenswrapper[4935]: I1201 18:48:47.362194 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:48:47 crc kubenswrapper[4935]: I1201 18:48:47.385065 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" podStartSLOduration=1.9142153400000002 podStartE2EDuration="7.385041384s" podCreationTimestamp="2025-12-01 18:48:40 +0000 UTC" firstStartedPulling="2025-12-01 18:48:41.177058514 +0000 UTC m=+1135.198687813" lastFinishedPulling="2025-12-01 18:48:46.647884408 +0000 UTC m=+1140.669513857" observedRunningTime="2025-12-01 18:48:47.384237349 +0000 UTC m=+1141.405866668" watchObservedRunningTime="2025-12-01 18:48:47.385041384 +0000 UTC m=+1141.406670653" Dec 01 18:48:47 crc kubenswrapper[4935]: I1201 18:48:47.453266 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" podStartSLOduration=2.774690728 podStartE2EDuration="8.453239596s" podCreationTimestamp="2025-12-01 18:48:39 +0000 UTC" firstStartedPulling="2025-12-01 18:48:40.95262032 +0000 UTC m=+1134.974249579" lastFinishedPulling="2025-12-01 18:48:46.631169188 +0000 UTC m=+1140.652798447" observedRunningTime="2025-12-01 18:48:47.448083583 +0000 UTC m=+1141.469712852" watchObservedRunningTime="2025-12-01 18:48:47.453239596 +0000 UTC m=+1141.474868875" Dec 01 18:48:54 crc kubenswrapper[4935]: I1201 18:48:54.345847 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:48:54 crc kubenswrapper[4935]: I1201 18:48:54.346423 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:49:00 crc kubenswrapper[4935]: I1201 18:49:00.762250 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7687bf949b-vrmk6" Dec 01 18:49:20 crc kubenswrapper[4935]: I1201 18:49:20.332614 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-788fff765c-47cxx" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.069736 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-5zlbd"] Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.074561 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.075440 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l"] Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.076415 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.077181 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-6l97z" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.078493 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.078618 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.078637 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.101563 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l"] Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.167624 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-fwmt4"] Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.169087 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.171123 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.171138 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.171181 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-w8d8x" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.174872 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.177520 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/65bd8eca-d900-4b1b-a859-bcbde52e4bea-frr-startup\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.177586 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-metrics\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.177666 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8rlt\" (UniqueName: \"kubernetes.io/projected/65bd8eca-d900-4b1b-a859-bcbde52e4bea-kube-api-access-p8rlt\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.177701 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-frr-conf\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.177747 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0858f09c-2e20-4861-af53-c1df064a5c48-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-6st2l\" (UID: \"0858f09c-2e20-4861-af53-c1df064a5c48\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.177774 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-frr-sockets\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.177833 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65bd8eca-d900-4b1b-a859-bcbde52e4bea-metrics-certs\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.177863 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwf7n\" (UniqueName: \"kubernetes.io/projected/0858f09c-2e20-4861-af53-c1df064a5c48-kube-api-access-hwf7n\") pod \"frr-k8s-webhook-server-7fcb986d4-6st2l\" (UID: \"0858f09c-2e20-4861-af53-c1df064a5c48\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.177887 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-reloader\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.187083 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-zhs5b"] Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.191129 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.192962 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.202365 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-zhs5b"] Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279518 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-metrics\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279586 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gjdb\" (UniqueName: \"kubernetes.io/projected/0c91c45c-1d3c-458a-9725-fcf4529e5db1-kube-api-access-8gjdb\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279617 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3c2a298c-be4d-4b96-82f2-78df48943e0e-metrics-certs\") pod \"controller-f8648f98b-zhs5b\" (UID: \"3c2a298c-be4d-4b96-82f2-78df48943e0e\") " pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279636 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c2a298c-be4d-4b96-82f2-78df48943e0e-cert\") pod \"controller-f8648f98b-zhs5b\" (UID: \"3c2a298c-be4d-4b96-82f2-78df48943e0e\") " pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279676 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8rlt\" (UniqueName: \"kubernetes.io/projected/65bd8eca-d900-4b1b-a859-bcbde52e4bea-kube-api-access-p8rlt\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279701 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-frr-conf\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279748 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhsvp\" (UniqueName: \"kubernetes.io/projected/3c2a298c-be4d-4b96-82f2-78df48943e0e-kube-api-access-mhsvp\") pod \"controller-f8648f98b-zhs5b\" (UID: \"3c2a298c-be4d-4b96-82f2-78df48943e0e\") " pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279781 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-metrics-certs\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279802 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0858f09c-2e20-4861-af53-c1df064a5c48-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-6st2l\" (UID: \"0858f09c-2e20-4861-af53-c1df064a5c48\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279824 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-frr-sockets\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279861 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-memberlist\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279921 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0c91c45c-1d3c-458a-9725-fcf4529e5db1-metallb-excludel2\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279956 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65bd8eca-d900-4b1b-a859-bcbde52e4bea-metrics-certs\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.279987 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwf7n\" (UniqueName: \"kubernetes.io/projected/0858f09c-2e20-4861-af53-c1df064a5c48-kube-api-access-hwf7n\") pod \"frr-k8s-webhook-server-7fcb986d4-6st2l\" (UID: \"0858f09c-2e20-4861-af53-c1df064a5c48\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.280010 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-reloader\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.280050 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/65bd8eca-d900-4b1b-a859-bcbde52e4bea-frr-startup\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.280135 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-frr-conf\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.280428 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-frr-sockets\") pod \"frr-k8s-5zlbd\" (UID: 
\"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: E1201 18:49:21.280605 4935 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 01 18:49:21 crc kubenswrapper[4935]: E1201 18:49:21.280675 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/65bd8eca-d900-4b1b-a859-bcbde52e4bea-metrics-certs podName:65bd8eca-d900-4b1b-a859-bcbde52e4bea nodeName:}" failed. No retries permitted until 2025-12-01 18:49:21.780655492 +0000 UTC m=+1175.802284751 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/65bd8eca-d900-4b1b-a859-bcbde52e4bea-metrics-certs") pod "frr-k8s-5zlbd" (UID: "65bd8eca-d900-4b1b-a859-bcbde52e4bea") : secret "frr-k8s-certs-secret" not found Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.280900 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-reloader\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.281123 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/65bd8eca-d900-4b1b-a859-bcbde52e4bea-metrics\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.281169 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/65bd8eca-d900-4b1b-a859-bcbde52e4bea-frr-startup\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.287018 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0858f09c-2e20-4861-af53-c1df064a5c48-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-6st2l\" (UID: \"0858f09c-2e20-4861-af53-c1df064a5c48\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.306031 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwf7n\" (UniqueName: \"kubernetes.io/projected/0858f09c-2e20-4861-af53-c1df064a5c48-kube-api-access-hwf7n\") pod \"frr-k8s-webhook-server-7fcb986d4-6st2l\" (UID: \"0858f09c-2e20-4861-af53-c1df064a5c48\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.308416 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8rlt\" (UniqueName: \"kubernetes.io/projected/65bd8eca-d900-4b1b-a859-bcbde52e4bea-kube-api-access-p8rlt\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.381716 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gjdb\" (UniqueName: \"kubernetes.io/projected/0c91c45c-1d3c-458a-9725-fcf4529e5db1-kube-api-access-8gjdb\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.381779 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3c2a298c-be4d-4b96-82f2-78df48943e0e-metrics-certs\") pod \"controller-f8648f98b-zhs5b\" (UID: \"3c2a298c-be4d-4b96-82f2-78df48943e0e\") " pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.381808 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c2a298c-be4d-4b96-82f2-78df48943e0e-cert\") pod \"controller-f8648f98b-zhs5b\" (UID: \"3c2a298c-be4d-4b96-82f2-78df48943e0e\") " pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.381863 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhsvp\" (UniqueName: \"kubernetes.io/projected/3c2a298c-be4d-4b96-82f2-78df48943e0e-kube-api-access-mhsvp\") pod \"controller-f8648f98b-zhs5b\" (UID: \"3c2a298c-be4d-4b96-82f2-78df48943e0e\") " pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.381908 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-metrics-certs\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.381942 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-memberlist\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.381983 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0c91c45c-1d3c-458a-9725-fcf4529e5db1-metallb-excludel2\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: E1201 18:49:21.382493 4935 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 01 18:49:21 crc kubenswrapper[4935]: E1201 18:49:21.382551 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-metrics-certs podName:0c91c45c-1d3c-458a-9725-fcf4529e5db1 nodeName:}" failed. No retries permitted until 2025-12-01 18:49:21.882534516 +0000 UTC m=+1175.904163775 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-metrics-certs") pod "speaker-fwmt4" (UID: "0c91c45c-1d3c-458a-9725-fcf4529e5db1") : secret "speaker-certs-secret" not found Dec 01 18:49:21 crc kubenswrapper[4935]: E1201 18:49:21.382768 4935 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 18:49:21 crc kubenswrapper[4935]: E1201 18:49:21.382790 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-memberlist podName:0c91c45c-1d3c-458a-9725-fcf4529e5db1 nodeName:}" failed. No retries permitted until 2025-12-01 18:49:21.882783334 +0000 UTC m=+1175.904412583 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-memberlist") pod "speaker-fwmt4" (UID: "0c91c45c-1d3c-458a-9725-fcf4529e5db1") : secret "metallb-memberlist" not found Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.382935 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/0c91c45c-1d3c-458a-9725-fcf4529e5db1-metallb-excludel2\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.386135 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3c2a298c-be4d-4b96-82f2-78df48943e0e-metrics-certs\") pod \"controller-f8648f98b-zhs5b\" (UID: \"3c2a298c-be4d-4b96-82f2-78df48943e0e\") " pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.389694 4935 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.398282 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c2a298c-be4d-4b96-82f2-78df48943e0e-cert\") pod \"controller-f8648f98b-zhs5b\" (UID: \"3c2a298c-be4d-4b96-82f2-78df48943e0e\") " pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.406714 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gjdb\" (UniqueName: \"kubernetes.io/projected/0c91c45c-1d3c-458a-9725-fcf4529e5db1-kube-api-access-8gjdb\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.407817 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhsvp\" (UniqueName: \"kubernetes.io/projected/3c2a298c-be4d-4b96-82f2-78df48943e0e-kube-api-access-mhsvp\") pod \"controller-f8648f98b-zhs5b\" (UID: \"3c2a298c-be4d-4b96-82f2-78df48943e0e\") " pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.415323 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.507975 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.792111 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65bd8eca-d900-4b1b-a859-bcbde52e4bea-metrics-certs\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.797728 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65bd8eca-d900-4b1b-a859-bcbde52e4bea-metrics-certs\") pod \"frr-k8s-5zlbd\" (UID: \"65bd8eca-d900-4b1b-a859-bcbde52e4bea\") " pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.857982 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l"] Dec 01 18:49:21 crc kubenswrapper[4935]: W1201 18:49:21.863472 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0858f09c_2e20_4861_af53_c1df064a5c48.slice/crio-7411e6b7145cc138e3c199e7e91256dc2e068bbae70b9fcaf70700425dbae5ba WatchSource:0}: Error finding container 7411e6b7145cc138e3c199e7e91256dc2e068bbae70b9fcaf70700425dbae5ba: Status 404 returned error can't find the container with id 7411e6b7145cc138e3c199e7e91256dc2e068bbae70b9fcaf70700425dbae5ba Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.894351 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-memberlist\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.894555 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-metrics-certs\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: E1201 18:49:21.894613 4935 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 18:49:21 crc kubenswrapper[4935]: E1201 18:49:21.894712 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-memberlist podName:0c91c45c-1d3c-458a-9725-fcf4529e5db1 nodeName:}" failed. No retries permitted until 2025-12-01 18:49:22.89468754 +0000 UTC m=+1176.916316809 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-memberlist") pod "speaker-fwmt4" (UID: "0c91c45c-1d3c-458a-9725-fcf4529e5db1") : secret "metallb-memberlist" not found Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.898946 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-metrics-certs\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:21 crc kubenswrapper[4935]: I1201 18:49:21.929775 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-zhs5b"] Dec 01 18:49:21 crc kubenswrapper[4935]: W1201 18:49:21.936911 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c2a298c_be4d_4b96_82f2_78df48943e0e.slice/crio-a996f5303c54e29e2ea11b68911f8199155a48138a9a8146ebef79fe70050256 WatchSource:0}: Error finding container a996f5303c54e29e2ea11b68911f8199155a48138a9a8146ebef79fe70050256: Status 404 returned error can't find the container with id a996f5303c54e29e2ea11b68911f8199155a48138a9a8146ebef79fe70050256 Dec 01 18:49:22 crc kubenswrapper[4935]: I1201 18:49:22.001835 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:22 crc kubenswrapper[4935]: I1201 18:49:22.677412 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerStarted","Data":"91678fcf54f9319813dd6902f953d1ce8cf936e27513ec9ef089438081b867a1"} Dec 01 18:49:22 crc kubenswrapper[4935]: I1201 18:49:22.678955 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" event={"ID":"0858f09c-2e20-4861-af53-c1df064a5c48","Type":"ContainerStarted","Data":"7411e6b7145cc138e3c199e7e91256dc2e068bbae70b9fcaf70700425dbae5ba"} Dec 01 18:49:22 crc kubenswrapper[4935]: I1201 18:49:22.680309 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-zhs5b" event={"ID":"3c2a298c-be4d-4b96-82f2-78df48943e0e","Type":"ContainerStarted","Data":"0a74c0135973dcb17d4175c3fb5be33b39d88ed2cfac5f716685a11ccb7d74b3"} Dec 01 18:49:22 crc kubenswrapper[4935]: I1201 18:49:22.680344 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-zhs5b" event={"ID":"3c2a298c-be4d-4b96-82f2-78df48943e0e","Type":"ContainerStarted","Data":"a996f5303c54e29e2ea11b68911f8199155a48138a9a8146ebef79fe70050256"} Dec 01 18:49:22 crc kubenswrapper[4935]: I1201 18:49:22.910787 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-memberlist\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:22 crc kubenswrapper[4935]: I1201 18:49:22.917447 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/0c91c45c-1d3c-458a-9725-fcf4529e5db1-memberlist\") pod \"speaker-fwmt4\" (UID: \"0c91c45c-1d3c-458a-9725-fcf4529e5db1\") " pod="metallb-system/speaker-fwmt4" Dec 01 18:49:22 crc kubenswrapper[4935]: I1201 18:49:22.989861 4935 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="metallb-system/speaker-fwmt4" Dec 01 18:49:23 crc kubenswrapper[4935]: W1201 18:49:23.026448 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c91c45c_1d3c_458a_9725_fcf4529e5db1.slice/crio-0ae5e53a99edca08255f47e1afb659bcb6663c5f8ed04c178bcf05f94ab658cd WatchSource:0}: Error finding container 0ae5e53a99edca08255f47e1afb659bcb6663c5f8ed04c178bcf05f94ab658cd: Status 404 returned error can't find the container with id 0ae5e53a99edca08255f47e1afb659bcb6663c5f8ed04c178bcf05f94ab658cd Dec 01 18:49:23 crc kubenswrapper[4935]: I1201 18:49:23.688696 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-zhs5b" event={"ID":"3c2a298c-be4d-4b96-82f2-78df48943e0e","Type":"ContainerStarted","Data":"49a9146f8ba4bb7ff937ba06bd5a5dfe91bb3e49ebd94ebb779e0161763eb8de"} Dec 01 18:49:23 crc kubenswrapper[4935]: I1201 18:49:23.689368 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:23 crc kubenswrapper[4935]: I1201 18:49:23.690320 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fwmt4" event={"ID":"0c91c45c-1d3c-458a-9725-fcf4529e5db1","Type":"ContainerStarted","Data":"35304a2a6c998bc2d1cb3e32801853911a96d1a8569af3770545811805d259d5"} Dec 01 18:49:23 crc kubenswrapper[4935]: I1201 18:49:23.690345 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fwmt4" event={"ID":"0c91c45c-1d3c-458a-9725-fcf4529e5db1","Type":"ContainerStarted","Data":"0ae5e53a99edca08255f47e1afb659bcb6663c5f8ed04c178bcf05f94ab658cd"} Dec 01 18:49:23 crc kubenswrapper[4935]: I1201 18:49:23.715314 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-zhs5b" podStartSLOduration=2.715292195 podStartE2EDuration="2.715292195s" podCreationTimestamp="2025-12-01 18:49:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:49:23.710346904 +0000 UTC m=+1177.731976163" watchObservedRunningTime="2025-12-01 18:49:23.715292195 +0000 UTC m=+1177.736921464" Dec 01 18:49:24 crc kubenswrapper[4935]: I1201 18:49:24.345735 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:49:24 crc kubenswrapper[4935]: I1201 18:49:24.345809 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:49:24 crc kubenswrapper[4935]: I1201 18:49:24.705310 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fwmt4" event={"ID":"0c91c45c-1d3c-458a-9725-fcf4529e5db1","Type":"ContainerStarted","Data":"cfcf3d4c589c740fdcaef18371a40a9b16e548ac91a847e682a4fea54c68b6c6"} Dec 01 18:49:24 crc kubenswrapper[4935]: I1201 18:49:24.705436 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-fwmt4" Dec 01 18:49:24 crc kubenswrapper[4935]: I1201 
18:49:24.725214 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-fwmt4" podStartSLOduration=3.725195313 podStartE2EDuration="3.725195313s" podCreationTimestamp="2025-12-01 18:49:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:49:24.724007925 +0000 UTC m=+1178.745637184" watchObservedRunningTime="2025-12-01 18:49:24.725195313 +0000 UTC m=+1178.746824572" Dec 01 18:49:30 crc kubenswrapper[4935]: I1201 18:49:30.751670 4935 generic.go:334] "Generic (PLEG): container finished" podID="65bd8eca-d900-4b1b-a859-bcbde52e4bea" containerID="e48f48fea092b947675aa5729c8b22b254d2e3c941055244b884c6d11c9bc3e2" exitCode=0 Dec 01 18:49:30 crc kubenswrapper[4935]: I1201 18:49:30.751740 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerDied","Data":"e48f48fea092b947675aa5729c8b22b254d2e3c941055244b884c6d11c9bc3e2"} Dec 01 18:49:30 crc kubenswrapper[4935]: I1201 18:49:30.754731 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" event={"ID":"0858f09c-2e20-4861-af53-c1df064a5c48","Type":"ContainerStarted","Data":"efc831911757885ae4b47e771d8aaa0971875456307a190aec4778dbc767d206"} Dec 01 18:49:30 crc kubenswrapper[4935]: I1201 18:49:30.754960 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:30 crc kubenswrapper[4935]: I1201 18:49:30.796966 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" podStartSLOduration=1.31758758 podStartE2EDuration="9.796939884s" podCreationTimestamp="2025-12-01 18:49:21 +0000 UTC" firstStartedPulling="2025-12-01 18:49:21.867971911 +0000 UTC m=+1175.889601200" lastFinishedPulling="2025-12-01 18:49:30.347324245 +0000 UTC m=+1184.368953504" observedRunningTime="2025-12-01 18:49:30.789988697 +0000 UTC m=+1184.811617966" watchObservedRunningTime="2025-12-01 18:49:30.796939884 +0000 UTC m=+1184.818569153" Dec 01 18:49:31 crc kubenswrapper[4935]: I1201 18:49:31.765738 4935 generic.go:334] "Generic (PLEG): container finished" podID="65bd8eca-d900-4b1b-a859-bcbde52e4bea" containerID="4791b2bb2b4f7f97583e11940df577a7ba9dfa558c54ea129d03590b781305ea" exitCode=0 Dec 01 18:49:31 crc kubenswrapper[4935]: I1201 18:49:31.765854 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerDied","Data":"4791b2bb2b4f7f97583e11940df577a7ba9dfa558c54ea129d03590b781305ea"} Dec 01 18:49:32 crc kubenswrapper[4935]: I1201 18:49:32.785675 4935 generic.go:334] "Generic (PLEG): container finished" podID="65bd8eca-d900-4b1b-a859-bcbde52e4bea" containerID="a86816cc80825a413fc79099013589abf55d8356958b923edbdbc3c27de0245f" exitCode=0 Dec 01 18:49:32 crc kubenswrapper[4935]: I1201 18:49:32.785762 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerDied","Data":"a86816cc80825a413fc79099013589abf55d8356958b923edbdbc3c27de0245f"} Dec 01 18:49:33 crc kubenswrapper[4935]: I1201 18:49:33.801345 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" 
event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerStarted","Data":"623158d58e8129dd5a8fb2f72cd20dffdf20c2438d0c026979ebe10155ff33b7"} Dec 01 18:49:33 crc kubenswrapper[4935]: I1201 18:49:33.801938 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerStarted","Data":"84f14e90589e9c8511dd2a334c1aa64f4f35e9ebd3b81db0dc8285da919d16ee"} Dec 01 18:49:33 crc kubenswrapper[4935]: I1201 18:49:33.801967 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerStarted","Data":"2350930f5505ed07d71a5da764b2eafb3d088a21a61a48948563e9bcb8a95b60"} Dec 01 18:49:33 crc kubenswrapper[4935]: I1201 18:49:33.801983 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerStarted","Data":"28cde7076bd9104b9bd37e0c76a8a8b4c281ab6e83103248b45a5840d8fc1440"} Dec 01 18:49:33 crc kubenswrapper[4935]: I1201 18:49:33.801996 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerStarted","Data":"09e8b026a8acead123cfa2d49544cca367f1af8d87f577e858c4bf5bb7ca6538"} Dec 01 18:49:34 crc kubenswrapper[4935]: I1201 18:49:34.814116 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-5zlbd" event={"ID":"65bd8eca-d900-4b1b-a859-bcbde52e4bea","Type":"ContainerStarted","Data":"df9843b17a8029834ebe75d934ca408e0fe62de6974e67aebd353f9287e85b3f"} Dec 01 18:49:34 crc kubenswrapper[4935]: I1201 18:49:34.814545 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:34 crc kubenswrapper[4935]: I1201 18:49:34.847826 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-5zlbd" podStartSLOduration=5.979551081 podStartE2EDuration="13.847794622s" podCreationTimestamp="2025-12-01 18:49:21 +0000 UTC" firstStartedPulling="2025-12-01 18:49:22.491237149 +0000 UTC m=+1176.512866418" lastFinishedPulling="2025-12-01 18:49:30.3594807 +0000 UTC m=+1184.381109959" observedRunningTime="2025-12-01 18:49:34.835386228 +0000 UTC m=+1188.857015497" watchObservedRunningTime="2025-12-01 18:49:34.847794622 +0000 UTC m=+1188.869423921" Dec 01 18:49:37 crc kubenswrapper[4935]: I1201 18:49:37.003040 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:37 crc kubenswrapper[4935]: I1201 18:49:37.037598 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:41 crc kubenswrapper[4935]: I1201 18:49:41.423492 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6st2l" Dec 01 18:49:41 crc kubenswrapper[4935]: I1201 18:49:41.515900 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-zhs5b" Dec 01 18:49:42 crc kubenswrapper[4935]: I1201 18:49:42.004719 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-5zlbd" Dec 01 18:49:42 crc kubenswrapper[4935]: I1201 18:49:42.995286 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-fwmt4" Dec 01 18:49:46 crc 
kubenswrapper[4935]: I1201 18:49:46.234780 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-dkrml"] Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.237800 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-dkrml" Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.240134 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz8ft\" (UniqueName: \"kubernetes.io/projected/095cbe4b-22f2-46c3-801b-a9aeaea6e2cd-kube-api-access-fz8ft\") pod \"openstack-operator-index-dkrml\" (UID: \"095cbe4b-22f2-46c3-801b-a9aeaea6e2cd\") " pod="openstack-operators/openstack-operator-index-dkrml" Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.240606 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-gghdz" Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.243944 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.247626 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.255379 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-dkrml"] Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.343594 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz8ft\" (UniqueName: \"kubernetes.io/projected/095cbe4b-22f2-46c3-801b-a9aeaea6e2cd-kube-api-access-fz8ft\") pod \"openstack-operator-index-dkrml\" (UID: \"095cbe4b-22f2-46c3-801b-a9aeaea6e2cd\") " pod="openstack-operators/openstack-operator-index-dkrml" Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.396256 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz8ft\" (UniqueName: \"kubernetes.io/projected/095cbe4b-22f2-46c3-801b-a9aeaea6e2cd-kube-api-access-fz8ft\") pod \"openstack-operator-index-dkrml\" (UID: \"095cbe4b-22f2-46c3-801b-a9aeaea6e2cd\") " pod="openstack-operators/openstack-operator-index-dkrml" Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.598209 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-gghdz" Dec 01 18:49:46 crc kubenswrapper[4935]: I1201 18:49:46.605494 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-dkrml" Dec 01 18:49:47 crc kubenswrapper[4935]: I1201 18:49:47.022609 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-dkrml"] Dec 01 18:49:47 crc kubenswrapper[4935]: I1201 18:49:47.953572 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dkrml" event={"ID":"095cbe4b-22f2-46c3-801b-a9aeaea6e2cd","Type":"ContainerStarted","Data":"c6d0dd8f6d58d5566b065f15b4166c89439b62993eddb23845c611baf58d17e0"} Dec 01 18:49:49 crc kubenswrapper[4935]: I1201 18:49:49.595685 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-dkrml"] Dec 01 18:49:50 crc kubenswrapper[4935]: I1201 18:49:50.214471 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-nfg25"] Dec 01 18:49:50 crc kubenswrapper[4935]: I1201 18:49:50.216639 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-nfg25" Dec 01 18:49:50 crc kubenswrapper[4935]: I1201 18:49:50.224658 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nfg25"] Dec 01 18:49:50 crc kubenswrapper[4935]: I1201 18:49:50.320715 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfg8j\" (UniqueName: \"kubernetes.io/projected/323c3307-d34a-4502-8a68-cef37832f834-kube-api-access-mfg8j\") pod \"openstack-operator-index-nfg25\" (UID: \"323c3307-d34a-4502-8a68-cef37832f834\") " pod="openstack-operators/openstack-operator-index-nfg25" Dec 01 18:49:50 crc kubenswrapper[4935]: I1201 18:49:50.422804 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfg8j\" (UniqueName: \"kubernetes.io/projected/323c3307-d34a-4502-8a68-cef37832f834-kube-api-access-mfg8j\") pod \"openstack-operator-index-nfg25\" (UID: \"323c3307-d34a-4502-8a68-cef37832f834\") " pod="openstack-operators/openstack-operator-index-nfg25" Dec 01 18:49:50 crc kubenswrapper[4935]: I1201 18:49:50.444611 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfg8j\" (UniqueName: \"kubernetes.io/projected/323c3307-d34a-4502-8a68-cef37832f834-kube-api-access-mfg8j\") pod \"openstack-operator-index-nfg25\" (UID: \"323c3307-d34a-4502-8a68-cef37832f834\") " pod="openstack-operators/openstack-operator-index-nfg25" Dec 01 18:49:50 crc kubenswrapper[4935]: I1201 18:49:50.542078 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-nfg25" Dec 01 18:49:51 crc kubenswrapper[4935]: I1201 18:49:51.211545 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nfg25"] Dec 01 18:49:51 crc kubenswrapper[4935]: I1201 18:49:51.994896 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dkrml" event={"ID":"095cbe4b-22f2-46c3-801b-a9aeaea6e2cd","Type":"ContainerStarted","Data":"21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8"} Dec 01 18:49:51 crc kubenswrapper[4935]: I1201 18:49:51.995106 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-dkrml" podUID="095cbe4b-22f2-46c3-801b-a9aeaea6e2cd" containerName="registry-server" containerID="cri-o://21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8" gracePeriod=2 Dec 01 18:49:51 crc kubenswrapper[4935]: I1201 18:49:51.998457 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nfg25" event={"ID":"323c3307-d34a-4502-8a68-cef37832f834","Type":"ContainerStarted","Data":"933cc6983094958ec0f9c1155c26bc7432c588499c20ef51654833fd418674bf"} Dec 01 18:49:51 crc kubenswrapper[4935]: I1201 18:49:51.998505 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nfg25" event={"ID":"323c3307-d34a-4502-8a68-cef37832f834","Type":"ContainerStarted","Data":"b28f8781d0aa62167016be1a1639871402d1371a7dbfb49ca42357b48547885c"} Dec 01 18:49:52 crc kubenswrapper[4935]: I1201 18:49:52.029193 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-dkrml" podStartSLOduration=2.23818229 podStartE2EDuration="6.029118672s" podCreationTimestamp="2025-12-01 18:49:46 +0000 UTC" firstStartedPulling="2025-12-01 18:49:47.027488589 +0000 UTC m=+1201.049117848" lastFinishedPulling="2025-12-01 18:49:50.818424971 +0000 UTC m=+1204.840054230" observedRunningTime="2025-12-01 18:49:52.028451701 +0000 UTC m=+1206.050081000" watchObservedRunningTime="2025-12-01 18:49:52.029118672 +0000 UTC m=+1206.050747971" Dec 01 18:49:52 crc kubenswrapper[4935]: I1201 18:49:52.050611 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-nfg25" podStartSLOduration=1.9025364040000001 podStartE2EDuration="2.05055968s" podCreationTimestamp="2025-12-01 18:49:50 +0000 UTC" firstStartedPulling="2025-12-01 18:49:51.211679176 +0000 UTC m=+1205.233308475" lastFinishedPulling="2025-12-01 18:49:51.359702492 +0000 UTC m=+1205.381331751" observedRunningTime="2025-12-01 18:49:52.047721537 +0000 UTC m=+1206.069350826" watchObservedRunningTime="2025-12-01 18:49:52.05055968 +0000 UTC m=+1206.072188969" Dec 01 18:49:52 crc kubenswrapper[4935]: I1201 18:49:52.538760 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-dkrml" Dec 01 18:49:52 crc kubenswrapper[4935]: I1201 18:49:52.667782 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz8ft\" (UniqueName: \"kubernetes.io/projected/095cbe4b-22f2-46c3-801b-a9aeaea6e2cd-kube-api-access-fz8ft\") pod \"095cbe4b-22f2-46c3-801b-a9aeaea6e2cd\" (UID: \"095cbe4b-22f2-46c3-801b-a9aeaea6e2cd\") " Dec 01 18:49:52 crc kubenswrapper[4935]: I1201 18:49:52.674042 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/095cbe4b-22f2-46c3-801b-a9aeaea6e2cd-kube-api-access-fz8ft" (OuterVolumeSpecName: "kube-api-access-fz8ft") pod "095cbe4b-22f2-46c3-801b-a9aeaea6e2cd" (UID: "095cbe4b-22f2-46c3-801b-a9aeaea6e2cd"). InnerVolumeSpecName "kube-api-access-fz8ft". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:49:52 crc kubenswrapper[4935]: I1201 18:49:52.770169 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz8ft\" (UniqueName: \"kubernetes.io/projected/095cbe4b-22f2-46c3-801b-a9aeaea6e2cd-kube-api-access-fz8ft\") on node \"crc\" DevicePath \"\"" Dec 01 18:49:53 crc kubenswrapper[4935]: I1201 18:49:53.012388 4935 generic.go:334] "Generic (PLEG): container finished" podID="095cbe4b-22f2-46c3-801b-a9aeaea6e2cd" containerID="21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8" exitCode=0 Dec 01 18:49:53 crc kubenswrapper[4935]: I1201 18:49:53.012493 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-dkrml" Dec 01 18:49:53 crc kubenswrapper[4935]: I1201 18:49:53.012486 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dkrml" event={"ID":"095cbe4b-22f2-46c3-801b-a9aeaea6e2cd","Type":"ContainerDied","Data":"21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8"} Dec 01 18:49:53 crc kubenswrapper[4935]: I1201 18:49:53.012607 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dkrml" event={"ID":"095cbe4b-22f2-46c3-801b-a9aeaea6e2cd","Type":"ContainerDied","Data":"c6d0dd8f6d58d5566b065f15b4166c89439b62993eddb23845c611baf58d17e0"} Dec 01 18:49:53 crc kubenswrapper[4935]: I1201 18:49:53.012652 4935 scope.go:117] "RemoveContainer" containerID="21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8" Dec 01 18:49:53 crc kubenswrapper[4935]: I1201 18:49:53.061492 4935 scope.go:117] "RemoveContainer" containerID="21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8" Dec 01 18:49:53 crc kubenswrapper[4935]: E1201 18:49:53.062453 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8\": container with ID starting with 21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8 not found: ID does not exist" containerID="21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8" Dec 01 18:49:53 crc kubenswrapper[4935]: I1201 18:49:53.062522 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8"} err="failed to get container status \"21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8\": rpc error: code = NotFound desc = could not find container 
\"21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8\": container with ID starting with 21c42ea899a0d36309a855720cb1c112c335324377025013f16170c35af01bb8 not found: ID does not exist" Dec 01 18:49:53 crc kubenswrapper[4935]: I1201 18:49:53.070036 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-dkrml"] Dec 01 18:49:53 crc kubenswrapper[4935]: I1201 18:49:53.081539 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-dkrml"] Dec 01 18:49:54 crc kubenswrapper[4935]: I1201 18:49:54.346338 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:49:54 crc kubenswrapper[4935]: I1201 18:49:54.346434 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:49:54 crc kubenswrapper[4935]: I1201 18:49:54.346507 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:49:54 crc kubenswrapper[4935]: I1201 18:49:54.347683 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"89d0c184ee1dbdba2189f946ff97ea233b33f6dde95b0c4dc3f41a9fad7d86ae"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 18:49:54 crc kubenswrapper[4935]: I1201 18:49:54.347795 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://89d0c184ee1dbdba2189f946ff97ea233b33f6dde95b0c4dc3f41a9fad7d86ae" gracePeriod=600 Dec 01 18:49:54 crc kubenswrapper[4935]: I1201 18:49:54.520976 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="095cbe4b-22f2-46c3-801b-a9aeaea6e2cd" path="/var/lib/kubelet/pods/095cbe4b-22f2-46c3-801b-a9aeaea6e2cd/volumes" Dec 01 18:49:55 crc kubenswrapper[4935]: I1201 18:49:55.047126 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="89d0c184ee1dbdba2189f946ff97ea233b33f6dde95b0c4dc3f41a9fad7d86ae" exitCode=0 Dec 01 18:49:55 crc kubenswrapper[4935]: I1201 18:49:55.047201 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"89d0c184ee1dbdba2189f946ff97ea233b33f6dde95b0c4dc3f41a9fad7d86ae"} Dec 01 18:49:55 crc kubenswrapper[4935]: I1201 18:49:55.047257 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"8d43b47ae64729f61d960fc5685829c02da961e532465f8f3fc4e3129716002b"} Dec 01 18:49:55 crc kubenswrapper[4935]: 
I1201 18:49:55.047278 4935 scope.go:117] "RemoveContainer" containerID="744bd448e7cc386bf9953720a69481d4b4d71c4c1477d84184ae1c1693198763" Dec 01 18:50:00 crc kubenswrapper[4935]: I1201 18:50:00.543139 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-nfg25" Dec 01 18:50:00 crc kubenswrapper[4935]: I1201 18:50:00.543803 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-nfg25" Dec 01 18:50:00 crc kubenswrapper[4935]: I1201 18:50:00.573204 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-nfg25" Dec 01 18:50:01 crc kubenswrapper[4935]: I1201 18:50:01.155282 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-nfg25" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.662410 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj"] Dec 01 18:50:02 crc kubenswrapper[4935]: E1201 18:50:02.663110 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="095cbe4b-22f2-46c3-801b-a9aeaea6e2cd" containerName="registry-server" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.663126 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="095cbe4b-22f2-46c3-801b-a9aeaea6e2cd" containerName="registry-server" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.663340 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="095cbe4b-22f2-46c3-801b-a9aeaea6e2cd" containerName="registry-server" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.664633 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.668996 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj"] Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.669419 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-n9j9s" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.761822 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grwhh\" (UniqueName: \"kubernetes.io/projected/a7adc7fa-0b6a-4433-8e11-53e6ba203427-kube-api-access-grwhh\") pod \"382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.761895 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-util\") pod \"382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.761962 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-bundle\") pod \"382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.863098 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grwhh\" (UniqueName: \"kubernetes.io/projected/a7adc7fa-0b6a-4433-8e11-53e6ba203427-kube-api-access-grwhh\") pod \"382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.863189 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-util\") pod \"382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.863248 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-bundle\") pod \"382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.864047 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-util\") pod \"382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.864089 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-bundle\") pod \"382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.887722 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grwhh\" (UniqueName: \"kubernetes.io/projected/a7adc7fa-0b6a-4433-8e11-53e6ba203427-kube-api-access-grwhh\") pod \"382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:02 crc kubenswrapper[4935]: I1201 18:50:02.999354 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:03 crc kubenswrapper[4935]: I1201 18:50:03.423354 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj"] Dec 01 18:50:03 crc kubenswrapper[4935]: W1201 18:50:03.429244 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7adc7fa_0b6a_4433_8e11_53e6ba203427.slice/crio-4bec4ab8e9bed898fe02452423fea38756e658fbecc20ac0bb56bd62af952e66 WatchSource:0}: Error finding container 4bec4ab8e9bed898fe02452423fea38756e658fbecc20ac0bb56bd62af952e66: Status 404 returned error can't find the container with id 4bec4ab8e9bed898fe02452423fea38756e658fbecc20ac0bb56bd62af952e66 Dec 01 18:50:04 crc kubenswrapper[4935]: I1201 18:50:04.142392 4935 generic.go:334] "Generic (PLEG): container finished" podID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerID="ae391a2493072e7ab888da51dc508ece6b13713c7fd814aa9c4772723ecbc991" exitCode=0 Dec 01 18:50:04 crc kubenswrapper[4935]: I1201 18:50:04.142478 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" event={"ID":"a7adc7fa-0b6a-4433-8e11-53e6ba203427","Type":"ContainerDied","Data":"ae391a2493072e7ab888da51dc508ece6b13713c7fd814aa9c4772723ecbc991"} Dec 01 18:50:04 crc kubenswrapper[4935]: I1201 18:50:04.142886 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" event={"ID":"a7adc7fa-0b6a-4433-8e11-53e6ba203427","Type":"ContainerStarted","Data":"4bec4ab8e9bed898fe02452423fea38756e658fbecc20ac0bb56bd62af952e66"} Dec 01 18:50:05 crc kubenswrapper[4935]: I1201 18:50:05.163915 4935 generic.go:334] "Generic (PLEG): container finished" podID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerID="02dc64b606f419f90aebeded0bf82644f21df2882532d35d061410d410bc16dd" exitCode=0 Dec 01 18:50:05 crc kubenswrapper[4935]: I1201 18:50:05.163972 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" event={"ID":"a7adc7fa-0b6a-4433-8e11-53e6ba203427","Type":"ContainerDied","Data":"02dc64b606f419f90aebeded0bf82644f21df2882532d35d061410d410bc16dd"} Dec 01 18:50:06 crc kubenswrapper[4935]: I1201 18:50:06.178103 4935 generic.go:334] "Generic (PLEG): container finished" podID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerID="0f0f846bae154949be6749187dcbe1184ca856bae5198f00632ab6d6ce68d35e" exitCode=0 Dec 01 18:50:06 crc kubenswrapper[4935]: I1201 18:50:06.178246 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" event={"ID":"a7adc7fa-0b6a-4433-8e11-53e6ba203427","Type":"ContainerDied","Data":"0f0f846bae154949be6749187dcbe1184ca856bae5198f00632ab6d6ce68d35e"} Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.550994 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.687351 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-util\") pod \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.687750 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grwhh\" (UniqueName: \"kubernetes.io/projected/a7adc7fa-0b6a-4433-8e11-53e6ba203427-kube-api-access-grwhh\") pod \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.688104 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-bundle\") pod \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\" (UID: \"a7adc7fa-0b6a-4433-8e11-53e6ba203427\") " Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.688911 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-bundle" (OuterVolumeSpecName: "bundle") pod "a7adc7fa-0b6a-4433-8e11-53e6ba203427" (UID: "a7adc7fa-0b6a-4433-8e11-53e6ba203427"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.689713 4935 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.697608 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7adc7fa-0b6a-4433-8e11-53e6ba203427-kube-api-access-grwhh" (OuterVolumeSpecName: "kube-api-access-grwhh") pod "a7adc7fa-0b6a-4433-8e11-53e6ba203427" (UID: "a7adc7fa-0b6a-4433-8e11-53e6ba203427"). InnerVolumeSpecName "kube-api-access-grwhh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.702565 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-util" (OuterVolumeSpecName: "util") pod "a7adc7fa-0b6a-4433-8e11-53e6ba203427" (UID: "a7adc7fa-0b6a-4433-8e11-53e6ba203427"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.791548 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grwhh\" (UniqueName: \"kubernetes.io/projected/a7adc7fa-0b6a-4433-8e11-53e6ba203427-kube-api-access-grwhh\") on node \"crc\" DevicePath \"\"" Dec 01 18:50:07 crc kubenswrapper[4935]: I1201 18:50:07.791632 4935 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a7adc7fa-0b6a-4433-8e11-53e6ba203427-util\") on node \"crc\" DevicePath \"\"" Dec 01 18:50:08 crc kubenswrapper[4935]: I1201 18:50:08.201778 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" event={"ID":"a7adc7fa-0b6a-4433-8e11-53e6ba203427","Type":"ContainerDied","Data":"4bec4ab8e9bed898fe02452423fea38756e658fbecc20ac0bb56bd62af952e66"} Dec 01 18:50:08 crc kubenswrapper[4935]: I1201 18:50:08.201843 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4bec4ab8e9bed898fe02452423fea38756e658fbecc20ac0bb56bd62af952e66" Dec 01 18:50:08 crc kubenswrapper[4935]: I1201 18:50:08.201876 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.322527 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb"] Dec 01 18:50:15 crc kubenswrapper[4935]: E1201 18:50:15.323425 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerName="pull" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.323442 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerName="pull" Dec 01 18:50:15 crc kubenswrapper[4935]: E1201 18:50:15.323454 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerName="util" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.323461 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerName="util" Dec 01 18:50:15 crc kubenswrapper[4935]: E1201 18:50:15.323503 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerName="extract" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.323511 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerName="extract" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.323717 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7adc7fa-0b6a-4433-8e11-53e6ba203427" containerName="extract" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.324405 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.334482 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-6qtmd" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.363420 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb"] Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.428489 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptll9\" (UniqueName: \"kubernetes.io/projected/c4a7a2a7-3e74-4d52-a597-2c85b31e9bc4-kube-api-access-ptll9\") pod \"openstack-operator-controller-operator-868fc9df76-9f6cb\" (UID: \"c4a7a2a7-3e74-4d52-a597-2c85b31e9bc4\") " pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.530566 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptll9\" (UniqueName: \"kubernetes.io/projected/c4a7a2a7-3e74-4d52-a597-2c85b31e9bc4-kube-api-access-ptll9\") pod \"openstack-operator-controller-operator-868fc9df76-9f6cb\" (UID: \"c4a7a2a7-3e74-4d52-a597-2c85b31e9bc4\") " pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.566425 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptll9\" (UniqueName: \"kubernetes.io/projected/c4a7a2a7-3e74-4d52-a597-2c85b31e9bc4-kube-api-access-ptll9\") pod \"openstack-operator-controller-operator-868fc9df76-9f6cb\" (UID: \"c4a7a2a7-3e74-4d52-a597-2c85b31e9bc4\") " pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" Dec 01 18:50:15 crc kubenswrapper[4935]: I1201 18:50:15.643051 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" Dec 01 18:50:16 crc kubenswrapper[4935]: I1201 18:50:16.133106 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb"] Dec 01 18:50:16 crc kubenswrapper[4935]: I1201 18:50:16.311780 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" event={"ID":"c4a7a2a7-3e74-4d52-a597-2c85b31e9bc4","Type":"ContainerStarted","Data":"82fecf6365f697e915d0f9cf98dc46479881703f016112fdc96c5ed6acadf65c"} Dec 01 18:50:21 crc kubenswrapper[4935]: I1201 18:50:21.367325 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" event={"ID":"c4a7a2a7-3e74-4d52-a597-2c85b31e9bc4","Type":"ContainerStarted","Data":"8baa1a242500a793e83d6028f36e2a2a7da3dac53a46578dba8aa0e7ee351ce1"} Dec 01 18:50:21 crc kubenswrapper[4935]: I1201 18:50:21.371444 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" Dec 01 18:50:21 crc kubenswrapper[4935]: I1201 18:50:21.424372 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" podStartSLOduration=1.98177329 podStartE2EDuration="6.424350544s" podCreationTimestamp="2025-12-01 18:50:15 +0000 UTC" firstStartedPulling="2025-12-01 18:50:16.153622975 +0000 UTC m=+1230.175252244" lastFinishedPulling="2025-12-01 18:50:20.596200239 +0000 UTC m=+1234.617829498" observedRunningTime="2025-12-01 18:50:21.415595259 +0000 UTC m=+1235.437224528" watchObservedRunningTime="2025-12-01 18:50:21.424350544 +0000 UTC m=+1235.445979813" Dec 01 18:50:25 crc kubenswrapper[4935]: I1201 18:50:25.647561 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-868fc9df76-9f6cb" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.791221 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.792928 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.795717 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.797090 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.797556 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-6gb8x" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.798815 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-nnn72" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.818012 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.832791 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.834121 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.835760 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-b765g" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.842907 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.844263 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.846608 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-gvw8g" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.852954 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.854213 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.858208 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-nhzcg" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.861069 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.877773 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wctz5\" (UniqueName: \"kubernetes.io/projected/a66fb641-eb39-4326-a4cb-d4e006a57436-kube-api-access-wctz5\") pod \"barbican-operator-controller-manager-7d9dfd778-4crq6\" (UID: \"a66fb641-eb39-4326-a4cb-d4e006a57436\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.877812 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktl9f\" (UniqueName: \"kubernetes.io/projected/8a1ded04-5c24-467c-a51b-c0cfbe67ba4b-kube-api-access-ktl9f\") pod \"cinder-operator-controller-manager-859b6ccc6-w2v9w\" (UID: \"8a1ded04-5c24-467c-a51b-c0cfbe67ba4b\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.886221 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.893512 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.904314 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.934210 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.935613 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.939393 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-lqdwl" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.954254 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.974299 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.975615 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.979056 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8p8k\" (UniqueName: \"kubernetes.io/projected/122ce04b-8536-401a-820f-fd1b9f04afcf-kube-api-access-d8p8k\") pod \"glance-operator-controller-manager-668d9c48b9-wwjxq\" (UID: \"122ce04b-8536-401a-820f-fd1b9f04afcf\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.979132 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wctz5\" (UniqueName: \"kubernetes.io/projected/a66fb641-eb39-4326-a4cb-d4e006a57436-kube-api-access-wctz5\") pod \"barbican-operator-controller-manager-7d9dfd778-4crq6\" (UID: \"a66fb641-eb39-4326-a4cb-d4e006a57436\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.979170 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktl9f\" (UniqueName: \"kubernetes.io/projected/8a1ded04-5c24-467c-a51b-c0cfbe67ba4b-kube-api-access-ktl9f\") pod \"cinder-operator-controller-manager-859b6ccc6-w2v9w\" (UID: \"8a1ded04-5c24-467c-a51b-c0cfbe67ba4b\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.979220 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hmjb\" (UniqueName: \"kubernetes.io/projected/e3aa8650-ce39-4eb2-8cae-eb012347abb6-kube-api-access-4hmjb\") pod \"heat-operator-controller-manager-5f64f6f8bb-j7bwb\" (UID: \"e3aa8650-ce39-4eb2-8cae-eb012347abb6\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.979294 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppddh\" (UniqueName: \"kubernetes.io/projected/90db9fa3-a008-4a95-910d-fd7b92f37dea-kube-api-access-ppddh\") pod \"designate-operator-controller-manager-78b4bc895b-7hmvv\" (UID: \"90db9fa3-a008-4a95-910d-fd7b92f37dea\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.986876 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp"] Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.988412 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 01 18:50:45 crc kubenswrapper[4935]: I1201 18:50:45.988716 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-zc9m4" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.003326 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktl9f\" (UniqueName: \"kubernetes.io/projected/8a1ded04-5c24-467c-a51b-c0cfbe67ba4b-kube-api-access-ktl9f\") pod \"cinder-operator-controller-manager-859b6ccc6-w2v9w\" (UID: \"8a1ded04-5c24-467c-a51b-c0cfbe67ba4b\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 
18:50:46.018214 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.019598 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.032195 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wctz5\" (UniqueName: \"kubernetes.io/projected/a66fb641-eb39-4326-a4cb-d4e006a57436-kube-api-access-wctz5\") pod \"barbican-operator-controller-manager-7d9dfd778-4crq6\" (UID: \"a66fb641-eb39-4326-a4cb-d4e006a57436\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.036623 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-cvjcs" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.080550 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn798\" (UniqueName: \"kubernetes.io/projected/671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161-kube-api-access-rn798\") pod \"horizon-operator-controller-manager-68c6d99b8f-zx4xc\" (UID: \"671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.080724 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzstd\" (UniqueName: \"kubernetes.io/projected/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-kube-api-access-lzstd\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.080848 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppddh\" (UniqueName: \"kubernetes.io/projected/90db9fa3-a008-4a95-910d-fd7b92f37dea-kube-api-access-ppddh\") pod \"designate-operator-controller-manager-78b4bc895b-7hmvv\" (UID: \"90db9fa3-a008-4a95-910d-fd7b92f37dea\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.080949 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8p8k\" (UniqueName: \"kubernetes.io/projected/122ce04b-8536-401a-820f-fd1b9f04afcf-kube-api-access-d8p8k\") pod \"glance-operator-controller-manager-668d9c48b9-wwjxq\" (UID: \"122ce04b-8536-401a-820f-fd1b9f04afcf\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.081021 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.081201 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 
18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.081333 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hmjb\" (UniqueName: \"kubernetes.io/projected/e3aa8650-ce39-4eb2-8cae-eb012347abb6-kube-api-access-4hmjb\") pod \"heat-operator-controller-manager-5f64f6f8bb-j7bwb\" (UID: \"e3aa8650-ce39-4eb2-8cae-eb012347abb6\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.101994 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.103418 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.126549 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.126685 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.127011 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-xfwqg" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.128730 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppddh\" (UniqueName: \"kubernetes.io/projected/90db9fa3-a008-4a95-910d-fd7b92f37dea-kube-api-access-ppddh\") pod \"designate-operator-controller-manager-78b4bc895b-7hmvv\" (UID: \"90db9fa3-a008-4a95-910d-fd7b92f37dea\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.139449 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.146396 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hmjb\" (UniqueName: \"kubernetes.io/projected/e3aa8650-ce39-4eb2-8cae-eb012347abb6-kube-api-access-4hmjb\") pod \"heat-operator-controller-manager-5f64f6f8bb-j7bwb\" (UID: \"e3aa8650-ce39-4eb2-8cae-eb012347abb6\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.150912 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8p8k\" (UniqueName: \"kubernetes.io/projected/122ce04b-8536-401a-820f-fd1b9f04afcf-kube-api-access-d8p8k\") pod \"glance-operator-controller-manager-668d9c48b9-wwjxq\" (UID: \"122ce04b-8536-401a-820f-fd1b9f04afcf\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.160577 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.178690 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.185676 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz72j\" (UniqueName: \"kubernetes.io/projected/d0336662-89bd-415e-8c22-2b05bf5dbf9f-kube-api-access-hz72j\") pod \"ironic-operator-controller-manager-6c548fd776-vtzkd\" (UID: \"d0336662-89bd-415e-8c22-2b05bf5dbf9f\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.185748 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.185798 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85t78\" (UniqueName: \"kubernetes.io/projected/8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6-kube-api-access-85t78\") pod \"keystone-operator-controller-manager-546d4bdf48-vk8hm\" (UID: \"8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.185829 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn798\" (UniqueName: \"kubernetes.io/projected/671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161-kube-api-access-rn798\") pod \"horizon-operator-controller-manager-68c6d99b8f-zx4xc\" (UID: \"671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.185857 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzstd\" (UniqueName: \"kubernetes.io/projected/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-kube-api-access-lzstd\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:50:46 crc kubenswrapper[4935]: E1201 18:50:46.186338 4935 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:46 crc kubenswrapper[4935]: E1201 18:50:46.186392 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert podName:38100ae6-51a8-4a49-87d1-704ea8b5a0bc nodeName:}" failed. No retries permitted until 2025-12-01 18:50:46.6863769 +0000 UTC m=+1260.708006159 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert") pod "infra-operator-controller-manager-57548d458d-k9kcp" (UID: "38100ae6-51a8-4a49-87d1-704ea8b5a0bc") : secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.206271 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.207558 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.207626 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.215822 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-g9vlr" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.228429 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.229798 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.232963 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-q9d7w" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.238980 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzstd\" (UniqueName: \"kubernetes.io/projected/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-kube-api-access-lzstd\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.245297 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn798\" (UniqueName: \"kubernetes.io/projected/671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161-kube-api-access-rn798\") pod \"horizon-operator-controller-manager-68c6d99b8f-zx4xc\" (UID: \"671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.256469 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.258005 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.258553 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.263638 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-czvhr" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.302787 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz72j\" (UniqueName: \"kubernetes.io/projected/d0336662-89bd-415e-8c22-2b05bf5dbf9f-kube-api-access-hz72j\") pod \"ironic-operator-controller-manager-6c548fd776-vtzkd\" (UID: \"d0336662-89bd-415e-8c22-2b05bf5dbf9f\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.302984 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85t78\" (UniqueName: \"kubernetes.io/projected/8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6-kube-api-access-85t78\") pod \"keystone-operator-controller-manager-546d4bdf48-vk8hm\" (UID: \"8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.303027 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44cxl\" (UniqueName: \"kubernetes.io/projected/d19dd9f6-38a2-4bdb-be7b-54184b15b7ab-kube-api-access-44cxl\") pod \"mariadb-operator-controller-manager-56bbcc9d85-qth5v\" (UID: \"d19dd9f6-38a2-4bdb-be7b-54184b15b7ab\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.303130 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4728w\" (UniqueName: \"kubernetes.io/projected/07d2f6f9-58fc-4c36-b2b8-ce0c48424c28-kube-api-access-4728w\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-wj722\" (UID: \"07d2f6f9-58fc-4c36-b2b8-ce0c48424c28\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.306562 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.323424 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.325934 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.326777 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85t78\" (UniqueName: \"kubernetes.io/projected/8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6-kube-api-access-85t78\") pod \"keystone-operator-controller-manager-546d4bdf48-vk8hm\" (UID: \"8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.327824 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-ct9rt" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.339811 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.352025 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz72j\" (UniqueName: \"kubernetes.io/projected/d0336662-89bd-415e-8c22-2b05bf5dbf9f-kube-api-access-hz72j\") pod \"ironic-operator-controller-manager-6c548fd776-vtzkd\" (UID: \"d0336662-89bd-415e-8c22-2b05bf5dbf9f\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.370468 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.397009 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.406297 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.408043 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44cxl\" (UniqueName: \"kubernetes.io/projected/d19dd9f6-38a2-4bdb-be7b-54184b15b7ab-kube-api-access-44cxl\") pod \"mariadb-operator-controller-manager-56bbcc9d85-qth5v\" (UID: \"d19dd9f6-38a2-4bdb-be7b-54184b15b7ab\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.408200 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4728w\" (UniqueName: \"kubernetes.io/projected/07d2f6f9-58fc-4c36-b2b8-ce0c48424c28-kube-api-access-4728w\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-wj722\" (UID: \"07d2f6f9-58fc-4c36-b2b8-ce0c48424c28\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.408294 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsgdv\" (UniqueName: \"kubernetes.io/projected/61654c85-dd73-48d3-9931-1ce7095e4f07-kube-api-access-rsgdv\") pod \"nova-operator-controller-manager-697bc559fc-8xpwk\" (UID: \"61654c85-dd73-48d3-9931-1ce7095e4f07\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.408418 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqzmx\" (UniqueName: \"kubernetes.io/projected/b5460053-e8df-4350-a4a4-ff44683d9f60-kube-api-access-rqzmx\") pod \"manila-operator-controller-manager-6546668bfd-7ppgb\" (UID: \"b5460053-e8df-4350-a4a4-ff44683d9f60\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.421774 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-25tbb"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.428890 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4728w\" (UniqueName: \"kubernetes.io/projected/07d2f6f9-58fc-4c36-b2b8-ce0c48424c28-kube-api-access-4728w\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-wj722\" (UID: \"07d2f6f9-58fc-4c36-b2b8-ce0c48424c28\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.429580 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44cxl\" (UniqueName: \"kubernetes.io/projected/d19dd9f6-38a2-4bdb-be7b-54184b15b7ab-kube-api-access-44cxl\") pod \"mariadb-operator-controller-manager-56bbcc9d85-qth5v\" (UID: \"d19dd9f6-38a2-4bdb-be7b-54184b15b7ab\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.431594 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.439520 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-c586p" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.450324 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-25tbb"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.521173 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsgdv\" (UniqueName: \"kubernetes.io/projected/61654c85-dd73-48d3-9931-1ce7095e4f07-kube-api-access-rsgdv\") pod \"nova-operator-controller-manager-697bc559fc-8xpwk\" (UID: \"61654c85-dd73-48d3-9931-1ce7095e4f07\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.521256 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqzmx\" (UniqueName: \"kubernetes.io/projected/b5460053-e8df-4350-a4a4-ff44683d9f60-kube-api-access-rqzmx\") pod \"manila-operator-controller-manager-6546668bfd-7ppgb\" (UID: \"b5460053-e8df-4350-a4a4-ff44683d9f60\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.521296 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb86s\" (UniqueName: \"kubernetes.io/projected/114bfc93-038f-416e-8a85-f1697387b2e2-kube-api-access-cb86s\") pod \"octavia-operator-controller-manager-998648c74-25tbb\" (UID: \"114bfc93-038f-416e-8a85-f1697387b2e2\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 
18:50:46.575829 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsgdv\" (UniqueName: \"kubernetes.io/projected/61654c85-dd73-48d3-9931-1ce7095e4f07-kube-api-access-rsgdv\") pod \"nova-operator-controller-manager-697bc559fc-8xpwk\" (UID: \"61654c85-dd73-48d3-9931-1ce7095e4f07\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.582877 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqzmx\" (UniqueName: \"kubernetes.io/projected/b5460053-e8df-4350-a4a4-ff44683d9f60-kube-api-access-rqzmx\") pod \"manila-operator-controller-manager-6546668bfd-7ppgb\" (UID: \"b5460053-e8df-4350-a4a4-ff44683d9f60\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.584919 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.586313 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.587304 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.587385 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.587535 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.598930 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.599211 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-7w6k5" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.599542 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-fhts7" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.608357 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-xfwqg" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.613675 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.616287 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.623836 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb86s\" (UniqueName: \"kubernetes.io/projected/114bfc93-038f-416e-8a85-f1697387b2e2-kube-api-access-cb86s\") pod \"octavia-operator-controller-manager-998648c74-25tbb\" (UID: \"114bfc93-038f-416e-8a85-f1697387b2e2\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.629970 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.631955 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.636692 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.664874 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-g9vlr" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.666129 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.666187 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.668779 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-q9d7w" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.669302 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-nzkp2" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.669309 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.673374 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.675026 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.675380 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.682257 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-kg6sr" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.682661 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-rt569" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.688335 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-czvhr" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.689886 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb86s\" (UniqueName: \"kubernetes.io/projected/114bfc93-038f-416e-8a85-f1697387b2e2-kube-api-access-cb86s\") pod \"octavia-operator-controller-manager-998648c74-25tbb\" (UID: \"114bfc93-038f-416e-8a85-f1697387b2e2\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.696539 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.707964 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.721766 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-ct9rt" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.726638 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.727353 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.727554 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klfbd\" (UniqueName: \"kubernetes.io/projected/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-kube-api-access-klfbd\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.730598 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75pvw\" (UniqueName: \"kubernetes.io/projected/0fa3cc8f-0a56-4bec-8afb-3fb3599fb222-kube-api-access-75pvw\") pod \"ovn-operator-controller-manager-b6456fdb6-wmr2q\" (UID: \"0fa3cc8f-0a56-4bec-8afb-3fb3599fb222\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.730699 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sj85\" (UniqueName: \"kubernetes.io/projected/50e604ea-ddfe-470b-bbbf-b65a5948d9d7-kube-api-access-5sj85\") pod \"placement-operator-controller-manager-78f8948974-kg6tq\" (UID: \"50e604ea-ddfe-470b-bbbf-b65a5948d9d7\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.730870 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:50:46 crc kubenswrapper[4935]: E1201 18:50:46.731127 4935 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:46 crc kubenswrapper[4935]: E1201 18:50:46.731203 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert podName:38100ae6-51a8-4a49-87d1-704ea8b5a0bc nodeName:}" failed. No retries permitted until 2025-12-01 18:50:47.731185035 +0000 UTC m=+1261.752814294 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert") pod "infra-operator-controller-manager-57548d458d-k9kcp" (UID: "38100ae6-51a8-4a49-87d1-704ea8b5a0bc") : secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.740934 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.781030 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-c586p" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.788439 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.790012 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.792252 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.804245 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-zc7mx" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.815972 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.834389 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-254m7\" (UniqueName: \"kubernetes.io/projected/d834cbf1-7527-4530-94ca-a0188780da7d-kube-api-access-254m7\") pod \"swift-operator-controller-manager-5f8c65bbfc-slbss\" (UID: \"d834cbf1-7527-4530-94ca-a0188780da7d\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.834483 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv9jx\" (UniqueName: \"kubernetes.io/projected/6d09f2a0-653e-417a-8fee-53935bc27816-kube-api-access-cv9jx\") pod \"telemetry-operator-controller-manager-7445b68fd8-4tjzb\" (UID: \"6d09f2a0-653e-417a-8fee-53935bc27816\") " pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.834525 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.834584 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klfbd\" (UniqueName: \"kubernetes.io/projected/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-kube-api-access-klfbd\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.834618 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75pvw\" (UniqueName: \"kubernetes.io/projected/0fa3cc8f-0a56-4bec-8afb-3fb3599fb222-kube-api-access-75pvw\") pod \"ovn-operator-controller-manager-b6456fdb6-wmr2q\" (UID: \"0fa3cc8f-0a56-4bec-8afb-3fb3599fb222\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.834646 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sj85\" (UniqueName: \"kubernetes.io/projected/50e604ea-ddfe-470b-bbbf-b65a5948d9d7-kube-api-access-5sj85\") pod \"placement-operator-controller-manager-78f8948974-kg6tq\" (UID: \"50e604ea-ddfe-470b-bbbf-b65a5948d9d7\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" Dec 01 18:50:46 crc kubenswrapper[4935]: E1201 18:50:46.835039 4935 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:46 crc kubenswrapper[4935]: E1201 18:50:46.835076 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert podName:fef2b5dc-a162-4b91-ada5-f6af85d8fe20 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:47.335063865 +0000 UTC m=+1261.356693124 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" (UID: "fef2b5dc-a162-4b91-ada5-f6af85d8fe20") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.854446 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sj85\" (UniqueName: \"kubernetes.io/projected/50e604ea-ddfe-470b-bbbf-b65a5948d9d7-kube-api-access-5sj85\") pod \"placement-operator-controller-manager-78f8948974-kg6tq\" (UID: \"50e604ea-ddfe-470b-bbbf-b65a5948d9d7\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.871720 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75pvw\" (UniqueName: \"kubernetes.io/projected/0fa3cc8f-0a56-4bec-8afb-3fb3599fb222-kube-api-access-75pvw\") pod \"ovn-operator-controller-manager-b6456fdb6-wmr2q\" (UID: \"0fa3cc8f-0a56-4bec-8afb-3fb3599fb222\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.873347 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klfbd\" (UniqueName: \"kubernetes.io/projected/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-kube-api-access-klfbd\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.915599 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 
18:50:46.917940 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.920739 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-l5dsk" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.937663 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-254m7\" (UniqueName: \"kubernetes.io/projected/d834cbf1-7527-4530-94ca-a0188780da7d-kube-api-access-254m7\") pod \"swift-operator-controller-manager-5f8c65bbfc-slbss\" (UID: \"d834cbf1-7527-4530-94ca-a0188780da7d\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.937744 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv9jx\" (UniqueName: \"kubernetes.io/projected/6d09f2a0-653e-417a-8fee-53935bc27816-kube-api-access-cv9jx\") pod \"telemetry-operator-controller-manager-7445b68fd8-4tjzb\" (UID: \"6d09f2a0-653e-417a-8fee-53935bc27816\") " pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.937828 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5gkn\" (UniqueName: \"kubernetes.io/projected/e553c27c-e8f0-4617-a914-46c8b5cfc33b-kube-api-access-t5gkn\") pod \"test-operator-controller-manager-5854674fcc-mhqkw\" (UID: \"e553c27c-e8f0-4617-a914-46c8b5cfc33b\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.946521 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.955103 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.962692 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-254m7\" (UniqueName: \"kubernetes.io/projected/d834cbf1-7527-4530-94ca-a0188780da7d-kube-api-access-254m7\") pod \"swift-operator-controller-manager-5f8c65bbfc-slbss\" (UID: \"d834cbf1-7527-4530-94ca-a0188780da7d\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.974913 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv9jx\" (UniqueName: \"kubernetes.io/projected/6d09f2a0-653e-417a-8fee-53935bc27816-kube-api-access-cv9jx\") pod \"telemetry-operator-controller-manager-7445b68fd8-4tjzb\" (UID: \"6d09f2a0-653e-417a-8fee-53935bc27816\") " pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.980505 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62"] Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.982377 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:46 crc kubenswrapper[4935]: I1201 18:50:46.994286 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62"] Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.000533 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.000662 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.001010 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-klpw2" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.002445 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.004422 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9"] Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.025403 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.029120 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9"] Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.029225 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.032050 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-7hprg" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.047375 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.052251 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5gkn\" (UniqueName: \"kubernetes.io/projected/e553c27c-e8f0-4617-a914-46c8b5cfc33b-kube-api-access-t5gkn\") pod \"test-operator-controller-manager-5854674fcc-mhqkw\" (UID: \"e553c27c-e8f0-4617-a914-46c8b5cfc33b\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.052528 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b5j7\" (UniqueName: \"kubernetes.io/projected/c5fb0811-5cb8-4bff-927c-99f4e08b8ae0-kube-api-access-4b5j7\") pod \"watcher-operator-controller-manager-769dc69bc-82j75\" (UID: \"c5fb0811-5cb8-4bff-927c-99f4e08b8ae0\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.074807 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6"] Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.105947 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5gkn\" (UniqueName: \"kubernetes.io/projected/e553c27c-e8f0-4617-a914-46c8b5cfc33b-kube-api-access-t5gkn\") pod \"test-operator-controller-manager-5854674fcc-mhqkw\" (UID: \"e553c27c-e8f0-4617-a914-46c8b5cfc33b\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.138464 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.161776 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w"] Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.162999 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8fr7\" (UniqueName: \"kubernetes.io/projected/367ed696-aab2-40a9-bdd8-04b4a5e928d0-kube-api-access-z8fr7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6wpv9\" (UID: \"367ed696-aab2-40a9-bdd8-04b4a5e928d0\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.163568 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.163658 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.163681 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9k5r\" (UniqueName: \"kubernetes.io/projected/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-kube-api-access-l9k5r\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.163703 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b5j7\" (UniqueName: \"kubernetes.io/projected/c5fb0811-5cb8-4bff-927c-99f4e08b8ae0-kube-api-access-4b5j7\") pod \"watcher-operator-controller-manager-769dc69bc-82j75\" (UID: \"c5fb0811-5cb8-4bff-927c-99f4e08b8ae0\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.189096 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq"] Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.199979 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb"] Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.209448 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b5j7\" (UniqueName: \"kubernetes.io/projected/c5fb0811-5cb8-4bff-927c-99f4e08b8ae0-kube-api-access-4b5j7\") pod \"watcher-operator-controller-manager-769dc69bc-82j75\" (UID: \"c5fb0811-5cb8-4bff-927c-99f4e08b8ae0\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.254101 4935 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" Dec 01 18:50:47 crc kubenswrapper[4935]: W1201 18:50:47.258607 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod122ce04b_8536_401a_820f_fd1b9f04afcf.slice/crio-bdf463d5f35e4316266c1e7f08691383b11b73166b6840678dadb95c479416a0 WatchSource:0}: Error finding container bdf463d5f35e4316266c1e7f08691383b11b73166b6840678dadb95c479416a0: Status 404 returned error can't find the container with id bdf463d5f35e4316266c1e7f08691383b11b73166b6840678dadb95c479416a0 Dec 01 18:50:47 crc kubenswrapper[4935]: W1201 18:50:47.263057 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3aa8650_ce39_4eb2_8cae_eb012347abb6.slice/crio-9978c46fe176e6d7e7b8ebebab8506dccab5090d6b09148051bef32bf2814685 WatchSource:0}: Error finding container 9978c46fe176e6d7e7b8ebebab8506dccab5090d6b09148051bef32bf2814685: Status 404 returned error can't find the container with id 9978c46fe176e6d7e7b8ebebab8506dccab5090d6b09148051bef32bf2814685 Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.265367 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.265453 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.265475 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9k5r\" (UniqueName: \"kubernetes.io/projected/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-kube-api-access-l9k5r\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.265506 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8fr7\" (UniqueName: \"kubernetes.io/projected/367ed696-aab2-40a9-bdd8-04b4a5e928d0-kube-api-access-z8fr7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6wpv9\" (UID: \"367ed696-aab2-40a9-bdd8-04b4a5e928d0\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.265893 4935 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.265932 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. 
No retries permitted until 2025-12-01 18:50:47.765919143 +0000 UTC m=+1261.787548402 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "webhook-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.266075 4935 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.266101 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:47.766095069 +0000 UTC m=+1261.787724328 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "metrics-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.296704 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9k5r\" (UniqueName: \"kubernetes.io/projected/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-kube-api-access-l9k5r\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.297048 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8fr7\" (UniqueName: \"kubernetes.io/projected/367ed696-aab2-40a9-bdd8-04b4a5e928d0-kube-api-access-z8fr7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6wpv9\" (UID: \"367ed696-aab2-40a9-bdd8-04b4a5e928d0\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.304311 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.370459 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.370745 4935 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.370805 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert podName:fef2b5dc-a162-4b91-ada5-f6af85d8fe20 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:48.370783746 +0000 UTC m=+1262.392413005 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" (UID: "fef2b5dc-a162-4b91-ada5-f6af85d8fe20") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.638742 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" event={"ID":"8a1ded04-5c24-467c-a51b-c0cfbe67ba4b","Type":"ContainerStarted","Data":"c5ed573cf7b36f147727c49b686238ff4df9f91e909e982a09488dc72d65d1ae"} Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.647422 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" event={"ID":"122ce04b-8536-401a-820f-fd1b9f04afcf","Type":"ContainerStarted","Data":"bdf463d5f35e4316266c1e7f08691383b11b73166b6840678dadb95c479416a0"} Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.649379 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" event={"ID":"a66fb641-eb39-4326-a4cb-d4e006a57436","Type":"ContainerStarted","Data":"587b60871f6bc18c00f7cd88d25c76c2406e90c1fb98c58925b2e9d25de5ca3a"} Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.654182 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" event={"ID":"e3aa8650-ce39-4eb2-8cae-eb012347abb6","Type":"ContainerStarted","Data":"9978c46fe176e6d7e7b8ebebab8506dccab5090d6b09148051bef32bf2814685"} Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.655745 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv"] Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.687326 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc"] Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.697241 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm"] Dec 01 18:50:47 crc kubenswrapper[4935]: W1201 18:50:47.708542 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c32d7fe_2d91_47ac_b0ad_c0b0d5cab9f6.slice/crio-0158a661ee46fb8083cbb8f3025e7b7e68a1d0cb4b6d7c995756f9431c08ba01 WatchSource:0}: Error finding container 0158a661ee46fb8083cbb8f3025e7b7e68a1d0cb4b6d7c995756f9431c08ba01: Status 404 returned error can't find the container with id 0158a661ee46fb8083cbb8f3025e7b7e68a1d0cb4b6d7c995756f9431c08ba01 Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.777403 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.777994 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs\") pod 
\"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:47 crc kubenswrapper[4935]: I1201 18:50:47.778138 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.777567 4935 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.778409 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:48.778393537 +0000 UTC m=+1262.800022806 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "webhook-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.779936 4935 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.780034 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:48.78002455 +0000 UTC m=+1262.801653809 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "metrics-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.778313 4935 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:47 crc kubenswrapper[4935]: E1201 18:50:47.780233 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert podName:38100ae6-51a8-4a49-87d1-704ea8b5a0bc nodeName:}" failed. No retries permitted until 2025-12-01 18:50:49.780197196 +0000 UTC m=+1263.801826455 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert") pod "infra-operator-controller-manager-57548d458d-k9kcp" (UID: "38100ae6-51a8-4a49-87d1-704ea8b5a0bc") : secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.133246 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v"] Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.143666 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd"] Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.169404 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-25tbb"] Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.194357 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722"] Dec 01 18:50:48 crc kubenswrapper[4935]: W1201 18:50:48.195912 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5460053_e8df_4350_a4a4_ff44683d9f60.slice/crio-4704d5205d1e2b147547266f6b2eabb66efa4437bfa5b32587701874f7eb48ec WatchSource:0}: Error finding container 4704d5205d1e2b147547266f6b2eabb66efa4437bfa5b32587701874f7eb48ec: Status 404 returned error can't find the container with id 4704d5205d1e2b147547266f6b2eabb66efa4437bfa5b32587701874f7eb48ec Dec 01 18:50:48 crc kubenswrapper[4935]: W1201 18:50:48.219437 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61654c85_dd73_48d3_9931_1ce7095e4f07.slice/crio-c66e74f1ccbf692292aa71f0d5582953d5c09ec7ebc21714c954dd6fe6a203ed WatchSource:0}: Error finding container c66e74f1ccbf692292aa71f0d5582953d5c09ec7ebc21714c954dd6fe6a203ed: Status 404 returned error can't find the container with id c66e74f1ccbf692292aa71f0d5582953d5c09ec7ebc21714c954dd6fe6a203ed Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.220319 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb"] Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.229998 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk"] Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.394220 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.394428 4935 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.394809 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert podName:fef2b5dc-a162-4b91-ada5-f6af85d8fe20 nodeName:}" failed. 
No retries permitted until 2025-12-01 18:50:50.394785652 +0000 UTC m=+1264.416414911 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" (UID: "fef2b5dc-a162-4b91-ada5-f6af85d8fe20") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.567581 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75"] Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.580747 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw"] Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.591471 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q"] Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.599864 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb"] Dec 01 18:50:48 crc kubenswrapper[4935]: W1201 18:50:48.608779 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5fb0811_5cb8_4bff_927c_99f4e08b8ae0.slice/crio-5bb0404b5cd44b590995c8768efdd99f189f47eaf5006916f8a684dcbe4bccc8 WatchSource:0}: Error finding container 5bb0404b5cd44b590995c8768efdd99f189f47eaf5006916f8a684dcbe4bccc8: Status 404 returned error can't find the container with id 5bb0404b5cd44b590995c8768efdd99f189f47eaf5006916f8a684dcbe4bccc8 Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.612413 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9"] Dec 01 18:50:48 crc kubenswrapper[4935]: W1201 18:50:48.616411 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d09f2a0_653e_417a_8fee_53935bc27816.slice/crio-9a0de88c7003a53cfa2f642d9b10a19fe4b58b7a8222206ddebd3878224b8bb3 WatchSource:0}: Error finding container 9a0de88c7003a53cfa2f642d9b10a19fe4b58b7a8222206ddebd3878224b8bb3: Status 404 returned error can't find the container with id 9a0de88c7003a53cfa2f642d9b10a19fe4b58b7a8222206ddebd3878224b8bb3 Dec 01 18:50:48 crc kubenswrapper[4935]: W1201 18:50:48.617222 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd834cbf1_7527_4530_94ca_a0188780da7d.slice/crio-fccf7367178178ed17c58e19210f2d2290145b3f32660719f2c3249b7b48bc6e WatchSource:0}: Error finding container fccf7367178178ed17c58e19210f2d2290145b3f32660719f2c3249b7b48bc6e: Status 404 returned error can't find the container with id fccf7367178178ed17c58e19210f2d2290145b3f32660719f2c3249b7b48bc6e Dec 01 18:50:48 crc kubenswrapper[4935]: W1201 18:50:48.617533 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode553c27c_e8f0_4617_a914_46c8b5cfc33b.slice/crio-8142b66fd134e920a2f7d20d576406de3e78682a463ae09172262f0f36d676eb WatchSource:0}: Error finding container 8142b66fd134e920a2f7d20d576406de3e78682a463ae09172262f0f36d676eb: Status 404 returned error can't find the container with id 
8142b66fd134e920a2f7d20d576406de3e78682a463ae09172262f0f36d676eb Dec 01 18:50:48 crc kubenswrapper[4935]: W1201 18:50:48.620727 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod367ed696_aab2_40a9_bdd8_04b4a5e928d0.slice/crio-86000729a3a23ac6ea39827cf557803a0316933ff5cf5a6f150462b25039f83d WatchSource:0}: Error finding container 86000729a3a23ac6ea39827cf557803a0316933ff5cf5a6f150462b25039f83d: Status 404 returned error can't find the container with id 86000729a3a23ac6ea39827cf557803a0316933ff5cf5a6f150462b25039f83d Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.620842 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq"] Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.628382 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4b5j7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-82j75_openstack-operators(c5fb0811-5cb8-4bff-927c-99f4e08b8ae0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.628967 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5sj85,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-kg6tq_openstack-operators(50e604ea-ddfe-470b-bbbf-b65a5948d9d7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.629121 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss"] Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.631660 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4b5j7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-82j75_openstack-operators(c5fb0811-5cb8-4bff-927c-99f4e08b8ae0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.632961 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" podUID="c5fb0811-5cb8-4bff-927c-99f4e08b8ae0" Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.633747 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5sj85,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-kg6tq_openstack-operators(50e604ea-ddfe-470b-bbbf-b65a5948d9d7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.634928 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" 
pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" podUID="50e604ea-ddfe-470b-bbbf-b65a5948d9d7" Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.669637 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" event={"ID":"d834cbf1-7527-4530-94ca-a0188780da7d","Type":"ContainerStarted","Data":"fccf7367178178ed17c58e19210f2d2290145b3f32660719f2c3249b7b48bc6e"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.672444 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" event={"ID":"367ed696-aab2-40a9-bdd8-04b4a5e928d0","Type":"ContainerStarted","Data":"86000729a3a23ac6ea39827cf557803a0316933ff5cf5a6f150462b25039f83d"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.674923 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" event={"ID":"6d09f2a0-653e-417a-8fee-53935bc27816","Type":"ContainerStarted","Data":"9a0de88c7003a53cfa2f642d9b10a19fe4b58b7a8222206ddebd3878224b8bb3"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.677651 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" event={"ID":"90db9fa3-a008-4a95-910d-fd7b92f37dea","Type":"ContainerStarted","Data":"82aa280570fcb981aca50a496448e56c68d49fe794d8ce6f6f0dbb2627b4ef2e"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.679907 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" event={"ID":"b5460053-e8df-4350-a4a4-ff44683d9f60","Type":"ContainerStarted","Data":"4704d5205d1e2b147547266f6b2eabb66efa4437bfa5b32587701874f7eb48ec"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.684550 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" event={"ID":"8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6","Type":"ContainerStarted","Data":"0158a661ee46fb8083cbb8f3025e7b7e68a1d0cb4b6d7c995756f9431c08ba01"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.685998 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" event={"ID":"0fa3cc8f-0a56-4bec-8afb-3fb3599fb222","Type":"ContainerStarted","Data":"dfb3c584ee9f349c084e8171c042098a58a64a2de5b047af5198c96eec21e89e"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.687940 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" event={"ID":"114bfc93-038f-416e-8a85-f1697387b2e2","Type":"ContainerStarted","Data":"47feb6385ce875d28d7b8c1f8777ed0d9762ad10a686cc879696ba0f85954d4a"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.690354 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" event={"ID":"07d2f6f9-58fc-4c36-b2b8-ce0c48424c28","Type":"ContainerStarted","Data":"a46ea93a3eeb69e160e27b59482031082356b125de053d64fa57f31d82210056"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.691596 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" 
event={"ID":"d19dd9f6-38a2-4bdb-be7b-54184b15b7ab","Type":"ContainerStarted","Data":"6c16119f290ab0ecb2b7795672085ebe02ae68104db86e591944cf8914ff8b14"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.694272 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" event={"ID":"61654c85-dd73-48d3-9931-1ce7095e4f07","Type":"ContainerStarted","Data":"c66e74f1ccbf692292aa71f0d5582953d5c09ec7ebc21714c954dd6fe6a203ed"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.696001 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" event={"ID":"c5fb0811-5cb8-4bff-927c-99f4e08b8ae0","Type":"ContainerStarted","Data":"5bb0404b5cd44b590995c8768efdd99f189f47eaf5006916f8a684dcbe4bccc8"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.699547 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" event={"ID":"d0336662-89bd-415e-8c22-2b05bf5dbf9f","Type":"ContainerStarted","Data":"9a1ffd3324a95387d0570a3587b00eeffcddc6af83f178075e5639df2db9aae0"} Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.700378 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" podUID="c5fb0811-5cb8-4bff-927c-99f4e08b8ae0" Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.702372 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" event={"ID":"e553c27c-e8f0-4617-a914-46c8b5cfc33b","Type":"ContainerStarted","Data":"8142b66fd134e920a2f7d20d576406de3e78682a463ae09172262f0f36d676eb"} Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.704199 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" event={"ID":"50e604ea-ddfe-470b-bbbf-b65a5948d9d7","Type":"ContainerStarted","Data":"f2c6531015c13123d257ab59f4afb2e4ae7454eeb36d8954f8bfeaf5f554cdb6"} Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.706469 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" podUID="50e604ea-ddfe-470b-bbbf-b65a5948d9d7" Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.708646 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" event={"ID":"671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161","Type":"ContainerStarted","Data":"527ae0da85014bd255caafa599ed00f17e6d2dd0b2e1fadbe39907d009763c63"} Dec 01 18:50:48 crc 
kubenswrapper[4935]: I1201 18:50:48.810384 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.810597 4935 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.810743 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:50.810724325 +0000 UTC m=+1264.832353584 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "metrics-server-cert" not found Dec 01 18:50:48 crc kubenswrapper[4935]: I1201 18:50:48.811271 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.811409 4935 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 18:50:48 crc kubenswrapper[4935]: E1201 18:50:48.811443 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:50.811433228 +0000 UTC m=+1264.833062487 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "webhook-server-cert" not found Dec 01 18:50:49 crc kubenswrapper[4935]: E1201 18:50:49.746005 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" podUID="c5fb0811-5cb8-4bff-927c-99f4e08b8ae0" Dec 01 18:50:49 crc kubenswrapper[4935]: E1201 18:50:49.752853 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" podUID="50e604ea-ddfe-470b-bbbf-b65a5948d9d7" Dec 01 18:50:49 crc kubenswrapper[4935]: I1201 18:50:49.831199 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:50:49 crc kubenswrapper[4935]: E1201 18:50:49.831413 4935 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:49 crc kubenswrapper[4935]: E1201 18:50:49.831460 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert podName:38100ae6-51a8-4a49-87d1-704ea8b5a0bc nodeName:}" failed. No retries permitted until 2025-12-01 18:50:53.831444886 +0000 UTC m=+1267.853074145 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert") pod "infra-operator-controller-manager-57548d458d-k9kcp" (UID: "38100ae6-51a8-4a49-87d1-704ea8b5a0bc") : secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:50 crc kubenswrapper[4935]: I1201 18:50:50.441806 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:50 crc kubenswrapper[4935]: E1201 18:50:50.442024 4935 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:50 crc kubenswrapper[4935]: E1201 18:50:50.442238 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert podName:fef2b5dc-a162-4b91-ada5-f6af85d8fe20 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:54.442215688 +0000 UTC m=+1268.463844947 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" (UID: "fef2b5dc-a162-4b91-ada5-f6af85d8fe20") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:50 crc kubenswrapper[4935]: I1201 18:50:50.852156 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:50 crc kubenswrapper[4935]: I1201 18:50:50.852299 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:50 crc kubenswrapper[4935]: E1201 18:50:50.852356 4935 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 01 18:50:50 crc kubenswrapper[4935]: E1201 18:50:50.852428 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:54.852410303 +0000 UTC m=+1268.874039562 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "metrics-server-cert" not found Dec 01 18:50:50 crc kubenswrapper[4935]: E1201 18:50:50.852464 4935 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 18:50:50 crc kubenswrapper[4935]: E1201 18:50:50.852553 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:50:54.852497576 +0000 UTC m=+1268.874126825 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "webhook-server-cert" not found Dec 01 18:50:53 crc kubenswrapper[4935]: I1201 18:50:53.914530 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:50:53 crc kubenswrapper[4935]: E1201 18:50:53.914715 4935 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:53 crc kubenswrapper[4935]: E1201 18:50:53.915586 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert podName:38100ae6-51a8-4a49-87d1-704ea8b5a0bc nodeName:}" failed. No retries permitted until 2025-12-01 18:51:01.915556016 +0000 UTC m=+1275.937185345 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert") pod "infra-operator-controller-manager-57548d458d-k9kcp" (UID: "38100ae6-51a8-4a49-87d1-704ea8b5a0bc") : secret "infra-operator-webhook-server-cert" not found Dec 01 18:50:54 crc kubenswrapper[4935]: I1201 18:50:54.530808 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:50:54 crc kubenswrapper[4935]: E1201 18:50:54.531085 4935 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:54 crc kubenswrapper[4935]: E1201 18:50:54.531257 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert podName:fef2b5dc-a162-4b91-ada5-f6af85d8fe20 nodeName:}" failed. No retries permitted until 2025-12-01 18:51:02.531220597 +0000 UTC m=+1276.552849936 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" (UID: "fef2b5dc-a162-4b91-ada5-f6af85d8fe20") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 18:50:54 crc kubenswrapper[4935]: I1201 18:50:54.936847 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:54 crc kubenswrapper[4935]: I1201 18:50:54.936980 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:50:54 crc kubenswrapper[4935]: E1201 18:50:54.937209 4935 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 18:50:54 crc kubenswrapper[4935]: E1201 18:50:54.937278 4935 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 01 18:50:54 crc kubenswrapper[4935]: E1201 18:50:54.937319 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:51:02.937291019 +0000 UTC m=+1276.958920308 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "webhook-server-cert" not found Dec 01 18:50:54 crc kubenswrapper[4935]: E1201 18:50:54.937383 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:51:02.937356781 +0000 UTC m=+1276.958986070 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "metrics-server-cert" not found Dec 01 18:51:02 crc kubenswrapper[4935]: I1201 18:51:02.003865 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:51:02 crc kubenswrapper[4935]: I1201 18:51:02.019466 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/38100ae6-51a8-4a49-87d1-704ea8b5a0bc-cert\") pod \"infra-operator-controller-manager-57548d458d-k9kcp\" (UID: \"38100ae6-51a8-4a49-87d1-704ea8b5a0bc\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:51:02 crc kubenswrapper[4935]: I1201 18:51:02.202272 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-zc9m4" Dec 01 18:51:02 crc kubenswrapper[4935]: I1201 18:51:02.209010 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:51:02 crc kubenswrapper[4935]: I1201 18:51:02.620435 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:51:02 crc kubenswrapper[4935]: I1201 18:51:02.626018 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fef2b5dc-a162-4b91-ada5-f6af85d8fe20-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw\" (UID: \"fef2b5dc-a162-4b91-ada5-f6af85d8fe20\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:51:02 crc kubenswrapper[4935]: I1201 18:51:02.890872 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:51:03 crc kubenswrapper[4935]: I1201 18:51:03.027474 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:51:03 crc kubenswrapper[4935]: I1201 18:51:03.027608 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:51:03 crc kubenswrapper[4935]: E1201 18:51:03.027745 4935 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 18:51:03 crc kubenswrapper[4935]: E1201 18:51:03.027808 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs podName:96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325 nodeName:}" failed. No retries permitted until 2025-12-01 18:51:19.027793641 +0000 UTC m=+1293.049422900 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs") pod "openstack-operator-controller-manager-96bb7f5d4-59p62" (UID: "96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325") : secret "webhook-server-cert" not found Dec 01 18:51:03 crc kubenswrapper[4935]: I1201 18:51:03.031099 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-metrics-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:51:08 crc kubenswrapper[4935]: E1201 18:51:08.160070 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801" Dec 01 18:51:08 crc kubenswrapper[4935]: E1201 18:51:08.161077 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ktl9f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-w2v9w_openstack-operators(8a1ded04-5c24-467c-a51b-c0cfbe67ba4b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:09 crc kubenswrapper[4935]: E1201 18:51:09.322296 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 01 18:51:09 crc kubenswrapper[4935]: E1201 18:51:09.322771 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4hmjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-j7bwb_openstack-operators(e3aa8650-ce39-4eb2-8cae-eb012347abb6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:10 crc kubenswrapper[4935]: E1201 18:51:10.594815 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 01 18:51:10 crc kubenswrapper[4935]: E1201 18:51:10.595357 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rn798,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-zx4xc_openstack-operators(671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:13 crc kubenswrapper[4935]: E1201 18:51:13.350498 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3" Dec 01 18:51:13 crc kubenswrapper[4935]: E1201 18:51:13.351069 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-85t78,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-546d4bdf48-vk8hm_openstack-operators(8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:14 crc kubenswrapper[4935]: E1201 18:51:14.068774 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85" Dec 01 18:51:14 crc kubenswrapper[4935]: E1201 18:51:14.068956 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ppddh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-7hmvv_openstack-operators(90db9fa3-a008-4a95-910d-fd7b92f37dea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:14 crc kubenswrapper[4935]: E1201 18:51:14.547248 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 01 18:51:14 crc kubenswrapper[4935]: E1201 18:51:14.547750 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cb86s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-25tbb_openstack-operators(114bfc93-038f-416e-8a85-f1697387b2e2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:18 crc kubenswrapper[4935]: E1201 18:51:18.230702 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 01 18:51:18 crc kubenswrapper[4935]: E1201 18:51:18.231120 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hz72j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-vtzkd_openstack-operators(d0336662-89bd-415e-8c22-2b05bf5dbf9f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:18 crc kubenswrapper[4935]: E1201 18:51:18.846303 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 01 18:51:18 crc kubenswrapper[4935]: E1201 18:51:18.846565 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-t5gkn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-mhqkw_openstack-operators(e553c27c-e8f0-4617-a914-46c8b5cfc33b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:19 crc kubenswrapper[4935]: I1201 18:51:19.065983 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:51:19 crc kubenswrapper[4935]: I1201 18:51:19.090430 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325-webhook-certs\") pod \"openstack-operator-controller-manager-96bb7f5d4-59p62\" (UID: \"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325\") " pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:51:19 crc kubenswrapper[4935]: I1201 18:51:19.105078 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:51:20 crc kubenswrapper[4935]: E1201 18:51:20.477752 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 01 18:51:20 crc kubenswrapper[4935]: E1201 18:51:20.478157 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rsgdv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-8xpwk_openstack-operators(61654c85-dd73-48d3-9931-1ce7095e4f07): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:20 crc kubenswrapper[4935]: E1201 18:51:20.969114 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 01 18:51:20 crc kubenswrapper[4935]: E1201 18:51:20.969636 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z8fr7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-6wpv9_openstack-operators(367ed696-aab2-40a9-bdd8-04b4a5e928d0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:51:20 crc kubenswrapper[4935]: E1201 18:51:20.970836 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" podUID="367ed696-aab2-40a9-bdd8-04b4a5e928d0" Dec 01 18:51:21 crc kubenswrapper[4935]: E1201 18:51:21.077054 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" podUID="367ed696-aab2-40a9-bdd8-04b4a5e928d0" Dec 01 18:51:21 crc kubenswrapper[4935]: I1201 18:51:21.427361 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp"] Dec 01 18:51:21 crc kubenswrapper[4935]: I1201 18:51:21.434968 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw"] Dec 01 18:51:21 crc kubenswrapper[4935]: W1201 18:51:21.628388 4935 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfef2b5dc_a162_4b91_ada5_f6af85d8fe20.slice/crio-4ac2b30638712f6f246d0bb279e815bdb05123e46763bf061e480dca716532d7 WatchSource:0}: Error finding container 4ac2b30638712f6f246d0bb279e815bdb05123e46763bf061e480dca716532d7: Status 404 returned error can't find the container with id 4ac2b30638712f6f246d0bb279e815bdb05123e46763bf061e480dca716532d7 Dec 01 18:51:21 crc kubenswrapper[4935]: W1201 18:51:21.636732 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38100ae6_51a8_4a49_87d1_704ea8b5a0bc.slice/crio-552566d5262ed82beee333bd4108efaa705cb285bc4135bffb54383ab88882e9 WatchSource:0}: Error finding container 552566d5262ed82beee333bd4108efaa705cb285bc4135bffb54383ab88882e9: Status 404 returned error can't find the container with id 552566d5262ed82beee333bd4108efaa705cb285bc4135bffb54383ab88882e9 Dec 01 18:51:22 crc kubenswrapper[4935]: I1201 18:51:22.056982 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62"] Dec 01 18:51:22 crc kubenswrapper[4935]: I1201 18:51:22.084366 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" event={"ID":"a66fb641-eb39-4326-a4cb-d4e006a57436","Type":"ContainerStarted","Data":"c950669d38fc7c63adec2094fef09730f8575c952a8a486e82d3f55cad90e844"} Dec 01 18:51:22 crc kubenswrapper[4935]: I1201 18:51:22.085230 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" event={"ID":"38100ae6-51a8-4a49-87d1-704ea8b5a0bc","Type":"ContainerStarted","Data":"552566d5262ed82beee333bd4108efaa705cb285bc4135bffb54383ab88882e9"} Dec 01 18:51:22 crc kubenswrapper[4935]: I1201 18:51:22.086512 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" event={"ID":"fef2b5dc-a162-4b91-ada5-f6af85d8fe20","Type":"ContainerStarted","Data":"4ac2b30638712f6f246d0bb279e815bdb05123e46763bf061e480dca716532d7"} Dec 01 18:51:23 crc kubenswrapper[4935]: I1201 18:51:23.125642 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" event={"ID":"122ce04b-8536-401a-820f-fd1b9f04afcf","Type":"ContainerStarted","Data":"9d245a8c963e426754dc294e798683d4970949eeab96fb92aab52d985d0b943d"} Dec 01 18:51:23 crc kubenswrapper[4935]: I1201 18:51:23.127538 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" event={"ID":"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325","Type":"ContainerStarted","Data":"072cac8a83a114076e6795cdb8aa788470a8c59c5763f40a0069aafb399ed25c"} Dec 01 18:51:23 crc kubenswrapper[4935]: I1201 18:51:23.128873 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" event={"ID":"d834cbf1-7527-4530-94ca-a0188780da7d","Type":"ContainerStarted","Data":"57d6c7c28d3f99ad6d6996c11db5b7465150a8b8a5f28685a4870f42fc2a8151"} Dec 01 18:51:23 crc kubenswrapper[4935]: I1201 18:51:23.130259 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" 
event={"ID":"07d2f6f9-58fc-4c36-b2b8-ce0c48424c28","Type":"ContainerStarted","Data":"5fad44c4f0166ece506234c3b94de92495a7965bab05efb9aec979aefe37667d"} Dec 01 18:51:23 crc kubenswrapper[4935]: I1201 18:51:23.131537 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" event={"ID":"0fa3cc8f-0a56-4bec-8afb-3fb3599fb222","Type":"ContainerStarted","Data":"3465b369f66cd029479b48d5960a3c7d17d4532c00774c69db21cc8f625f6e0e"} Dec 01 18:51:23 crc kubenswrapper[4935]: I1201 18:51:23.134704 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" event={"ID":"d19dd9f6-38a2-4bdb-be7b-54184b15b7ab","Type":"ContainerStarted","Data":"2bff5cc0ff43903a4cc4a1ec74d61180f2271dd0a857868f2254647c8974e28a"} Dec 01 18:51:28 crc kubenswrapper[4935]: I1201 18:51:28.203108 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" event={"ID":"6d09f2a0-653e-417a-8fee-53935bc27816","Type":"ContainerStarted","Data":"684e2b49e991f12ce8a4d86d199947571267879e1adf13b7ac0d8e26d57a980f"} Dec 01 18:51:28 crc kubenswrapper[4935]: I1201 18:51:28.209107 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" event={"ID":"b5460053-e8df-4350-a4a4-ff44683d9f60","Type":"ContainerStarted","Data":"a6ad552ba17b2a08c0ca25a4ad896b46a10638cc396368506b29809a182c0223"} Dec 01 18:51:29 crc kubenswrapper[4935]: I1201 18:51:29.219993 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" event={"ID":"96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325","Type":"ContainerStarted","Data":"226ba17612e1b036cc91eddbf7785c6f7779cbe90781dc08ad75659313f0c253"} Dec 01 18:51:29 crc kubenswrapper[4935]: I1201 18:51:29.220300 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:51:29 crc kubenswrapper[4935]: I1201 18:51:29.222774 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" event={"ID":"50e604ea-ddfe-470b-bbbf-b65a5948d9d7","Type":"ContainerStarted","Data":"893f12a6ef80bc388567a9bc88ba5c1efb44971acdcd55f761d2ad770ec841f1"} Dec 01 18:51:29 crc kubenswrapper[4935]: I1201 18:51:29.230004 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" event={"ID":"c5fb0811-5cb8-4bff-927c-99f4e08b8ae0","Type":"ContainerStarted","Data":"7ca6661cac772d2ccd030fa856585b3e4892611bb7f59020430d3c6f9a37677b"} Dec 01 18:51:29 crc kubenswrapper[4935]: I1201 18:51:29.250727 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" podStartSLOduration=43.250703515 podStartE2EDuration="43.250703515s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:51:29.247626858 +0000 UTC m=+1303.269256127" watchObservedRunningTime="2025-12-01 18:51:29.250703515 +0000 UTC m=+1303.272332774" Dec 01 18:51:31 crc kubenswrapper[4935]: E1201 18:51:31.135628 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" podUID="8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6" Dec 01 18:51:31 crc kubenswrapper[4935]: E1201 18:51:31.147086 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" podUID="671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161" Dec 01 18:51:31 crc kubenswrapper[4935]: I1201 18:51:31.251963 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" event={"ID":"671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161","Type":"ContainerStarted","Data":"ef11370b15b605d8f0710894fa3c274f46ad6466a2522248c74bffe4fd88605e"} Dec 01 18:51:31 crc kubenswrapper[4935]: I1201 18:51:31.254559 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" event={"ID":"8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6","Type":"ContainerStarted","Data":"a54a84eaccc74d56562ca4814ec9b264369f8736c5ba811c22e520f25448698d"} Dec 01 18:51:31 crc kubenswrapper[4935]: I1201 18:51:31.257336 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" event={"ID":"fef2b5dc-a162-4b91-ada5-f6af85d8fe20","Type":"ContainerStarted","Data":"4aee963dab722e992d5e259f0f1b53cbc47782cb628035d18fb7e40ea6040746"} Dec 01 18:51:31 crc kubenswrapper[4935]: I1201 18:51:31.262014 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" event={"ID":"50e604ea-ddfe-470b-bbbf-b65a5948d9d7","Type":"ContainerStarted","Data":"bbc0858e1e0874e0058738d8f75a928346c1a4d7022f7578b5a27709fe37b7ae"} Dec 01 18:51:31 crc kubenswrapper[4935]: I1201 18:51:31.262230 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" Dec 01 18:51:31 crc kubenswrapper[4935]: I1201 18:51:31.328163 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" podStartSLOduration=12.825451444 podStartE2EDuration="45.328131555s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.628748004 +0000 UTC m=+1262.650377273" lastFinishedPulling="2025-12-01 18:51:21.131428125 +0000 UTC m=+1295.153057384" observedRunningTime="2025-12-01 18:51:31.317374495 +0000 UTC m=+1305.339003754" watchObservedRunningTime="2025-12-01 18:51:31.328131555 +0000 UTC m=+1305.349760814" Dec 01 18:51:31 crc kubenswrapper[4935]: E1201 18:51:31.465449 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" podUID="8a1ded04-5c24-467c-a51b-c0cfbe67ba4b" Dec 01 18:51:31 crc kubenswrapper[4935]: E1201 18:51:31.546877 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context 
canceled\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" podUID="90db9fa3-a008-4a95-910d-fd7b92f37dea" Dec 01 18:51:31 crc kubenswrapper[4935]: E1201 18:51:31.591357 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" podUID="e553c27c-e8f0-4617-a914-46c8b5cfc33b" Dec 01 18:51:31 crc kubenswrapper[4935]: E1201 18:51:31.734888 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" podUID="e3aa8650-ce39-4eb2-8cae-eb012347abb6" Dec 01 18:51:31 crc kubenswrapper[4935]: E1201 18:51:31.738712 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" podUID="61654c85-dd73-48d3-9931-1ce7095e4f07" Dec 01 18:51:31 crc kubenswrapper[4935]: E1201 18:51:31.891834 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" podUID="114bfc93-038f-416e-8a85-f1697387b2e2" Dec 01 18:51:31 crc kubenswrapper[4935]: E1201 18:51:31.920901 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" podUID="d0336662-89bd-415e-8c22-2b05bf5dbf9f" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.294477 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" event={"ID":"b5460053-e8df-4350-a4a4-ff44683d9f60","Type":"ContainerStarted","Data":"7d01b481ec5b8b7aca59dee04bc960e002190aedc052db07656d4552f7be08da"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.294666 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.297141 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.309618 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" event={"ID":"0fa3cc8f-0a56-4bec-8afb-3fb3599fb222","Type":"ContainerStarted","Data":"cedacdc5a012f318558ce68b0a13cb38baac00569168f8ca21cc285fa4add182"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.310850 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.322439 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.329361 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" event={"ID":"d0336662-89bd-415e-8c22-2b05bf5dbf9f","Type":"ContainerStarted","Data":"399651a9a5ae2995e42f78de7e7daa16e316f5c3216244fbc8f39444ad5b46d2"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.347081 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" event={"ID":"114bfc93-038f-416e-8a85-f1697387b2e2","Type":"ContainerStarted","Data":"1efe5c6369c9907fbb0cb1848631412a2eb3d301eb0c8560421780c8a58b4611"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.360490 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" event={"ID":"c5fb0811-5cb8-4bff-927c-99f4e08b8ae0","Type":"ContainerStarted","Data":"a32052113dc413c80f6c1c12b14e08556f9e18b7033b5e7960b49c4e190f1d58"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.361322 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.362965 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-7ppgb" podStartSLOduration=3.757772207 podStartE2EDuration="46.362939417s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.21460286 +0000 UTC m=+1262.236232119" lastFinishedPulling="2025-12-01 18:51:30.81977007 +0000 UTC m=+1304.841399329" observedRunningTime="2025-12-01 18:51:32.342237582 +0000 UTC m=+1306.363866841" watchObservedRunningTime="2025-12-01 18:51:32.362939417 +0000 UTC m=+1306.384568676" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.377751 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" event={"ID":"fef2b5dc-a162-4b91-ada5-f6af85d8fe20","Type":"ContainerStarted","Data":"fb33596f44dbed6e82d045a9412f26c12bc9c66a0cfd26f95573be9d77f2b2ee"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.378649 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.392441 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" event={"ID":"61654c85-dd73-48d3-9931-1ce7095e4f07","Type":"ContainerStarted","Data":"564895648ca5bc12a0dfec1ed309d50de67dea0a042e664037a105b875150f50"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.417299 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" event={"ID":"671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161","Type":"ContainerStarted","Data":"33a90d9a0657ee5b4e5d9080bc8b3abc0f286b497b2f54d57780ab24cb4fd6ac"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.418076 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.444672 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" event={"ID":"e553c27c-e8f0-4617-a914-46c8b5cfc33b","Type":"ContainerStarted","Data":"3e65706c04e7b2d072531cf5a2e32bd7cbc996c5a321a733ac0b7e880c458ea0"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.457394 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" event={"ID":"90db9fa3-a008-4a95-910d-fd7b92f37dea","Type":"ContainerStarted","Data":"a7e58cda6d2164e40b77a9fbc562c53d1fafdc08aae37eebd2e8111caa81cd0f"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.477535 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-wmr2q" podStartSLOduration=4.352712212 podStartE2EDuration="46.477512718s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.624964352 +0000 UTC m=+1262.646593611" lastFinishedPulling="2025-12-01 18:51:30.749764848 +0000 UTC m=+1304.771394117" observedRunningTime="2025-12-01 18:51:32.436440249 +0000 UTC m=+1306.458069508" watchObservedRunningTime="2025-12-01 18:51:32.477512718 +0000 UTC m=+1306.499141987" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.505787 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" event={"ID":"38100ae6-51a8-4a49-87d1-704ea8b5a0bc","Type":"ContainerStarted","Data":"b0364670ae89539117c58510408e32fa1eb31f8b6ebbbc20d8414240c379eae2"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.505832 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" event={"ID":"38100ae6-51a8-4a49-87d1-704ea8b5a0bc","Type":"ContainerStarted","Data":"d5c60ddaa0be980ff4c915d0b3a4c69ae9e78589cdebd70614a0b5e2a8f3c4af"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.506012 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.510235 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" podStartSLOduration=14.004898873 podStartE2EDuration="46.510218821s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.625045654 +0000 UTC m=+1262.646674913" lastFinishedPulling="2025-12-01 18:51:21.130365602 +0000 UTC m=+1295.151994861" observedRunningTime="2025-12-01 18:51:32.499641477 +0000 UTC m=+1306.521270736" watchObservedRunningTime="2025-12-01 18:51:32.510218821 +0000 UTC m=+1306.531848080" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.543883 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" podStartSLOduration=37.516358433 podStartE2EDuration="46.543864504s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:51:21.631982194 +0000 UTC m=+1295.653611453" lastFinishedPulling="2025-12-01 18:51:30.659488255 +0000 UTC m=+1304.681117524" observedRunningTime="2025-12-01 18:51:32.535174539 +0000 UTC m=+1306.556803798" watchObservedRunningTime="2025-12-01 18:51:32.543864504 +0000 UTC m=+1306.565493763" Dec 01 18:51:32 crc 
kubenswrapper[4935]: I1201 18:51:32.570477 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" event={"ID":"07d2f6f9-58fc-4c36-b2b8-ce0c48424c28","Type":"ContainerStarted","Data":"faf056f9c9619017296ea7f5827ac92fdf2be11d6cd5a7f3c65db403ebe5aea3"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.570529 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.570559 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.575827 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" podStartSLOduration=3.369640133 podStartE2EDuration="47.575809044s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:50:47.693586948 +0000 UTC m=+1261.715216197" lastFinishedPulling="2025-12-01 18:51:31.899755829 +0000 UTC m=+1305.921385108" observedRunningTime="2025-12-01 18:51:32.569122912 +0000 UTC m=+1306.590752171" watchObservedRunningTime="2025-12-01 18:51:32.575809044 +0000 UTC m=+1306.597438313" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.593240 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" event={"ID":"d19dd9f6-38a2-4bdb-be7b-54184b15b7ab","Type":"ContainerStarted","Data":"6b4f14dab1a87cb8f46d7725dc7c90075d55aceaf8e75f7577a8fee6e524db79"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.593277 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.606925 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.622319 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" event={"ID":"122ce04b-8536-401a-820f-fd1b9f04afcf","Type":"ContainerStarted","Data":"f66e70213dc4abd2c85de55788b8445e1ba5bef633269ddb223641325c4e15f8"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.624236 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.632170 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.656735 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" event={"ID":"6d09f2a0-653e-417a-8fee-53935bc27816","Type":"ContainerStarted","Data":"974330d7749821d7c87732a06a139f1295000785e4abcc546ad4f51915f8ef7a"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.658068 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" Dec 01 18:51:32 crc 
kubenswrapper[4935]: I1201 18:51:32.658302 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wj722" podStartSLOduration=5.143902892 podStartE2EDuration="47.65828017s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.208029736 +0000 UTC m=+1262.229658995" lastFinishedPulling="2025-12-01 18:51:30.722406994 +0000 UTC m=+1304.744036273" observedRunningTime="2025-12-01 18:51:32.653811449 +0000 UTC m=+1306.675440708" watchObservedRunningTime="2025-12-01 18:51:32.65828017 +0000 UTC m=+1306.679909429" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.670512 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.672956 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" event={"ID":"e3aa8650-ce39-4eb2-8cae-eb012347abb6","Type":"ContainerStarted","Data":"b502e645ef8aa2d5521625f8f48b2661f7c40c3b8c5e7ff92fdaefcb4247dfa6"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.679376 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" event={"ID":"8a1ded04-5c24-467c-a51b-c0cfbe67ba4b","Type":"ContainerStarted","Data":"5ce6b7d682c5bf5ba3ac66cb9a944b0f0b9cbf591d64c423c5357f3022d5a0e8"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.694435 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" event={"ID":"a66fb641-eb39-4326-a4cb-d4e006a57436","Type":"ContainerStarted","Data":"151b9d65c1acd1312297a8f390081c2e869ad81a99dfce6f40a5611e01152eae"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.694908 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.714210 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.715658 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" event={"ID":"d834cbf1-7527-4530-94ca-a0188780da7d","Type":"ContainerStarted","Data":"0e36b5d88ff162263a10f399301cff4e6faf6a92882f84e9a7eab55a5d23e18c"} Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.715696 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.715817 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-qth5v" podStartSLOduration=4.080386033 podStartE2EDuration="46.715805408s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.184090987 +0000 UTC m=+1262.205720246" lastFinishedPulling="2025-12-01 18:51:30.819510362 +0000 UTC m=+1304.841139621" observedRunningTime="2025-12-01 18:51:32.703788568 +0000 UTC m=+1306.725417827" watchObservedRunningTime="2025-12-01 18:51:32.715805408 +0000 UTC m=+1306.737434667" Dec 01 
18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.724372 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.744025 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" podStartSLOduration=38.678783845 podStartE2EDuration="47.744005908s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:51:21.657186791 +0000 UTC m=+1295.678816050" lastFinishedPulling="2025-12-01 18:51:30.722408844 +0000 UTC m=+1304.744038113" observedRunningTime="2025-12-01 18:51:32.733393963 +0000 UTC m=+1306.755023222" watchObservedRunningTime="2025-12-01 18:51:32.744005908 +0000 UTC m=+1306.765635157" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.870249 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-4crq6" podStartSLOduration=3.9383171580000003 podStartE2EDuration="47.870232107s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:50:46.896731312 +0000 UTC m=+1260.918360571" lastFinishedPulling="2025-12-01 18:51:30.828646261 +0000 UTC m=+1304.850275520" observedRunningTime="2025-12-01 18:51:32.863743373 +0000 UTC m=+1306.885372632" watchObservedRunningTime="2025-12-01 18:51:32.870232107 +0000 UTC m=+1306.891861376" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.907772 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-wwjxq" podStartSLOduration=4.494680572 podStartE2EDuration="47.907757204s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:50:47.315239548 +0000 UTC m=+1261.336868807" lastFinishedPulling="2025-12-01 18:51:30.72831617 +0000 UTC m=+1304.749945439" observedRunningTime="2025-12-01 18:51:32.895240348 +0000 UTC m=+1306.916869607" watchObservedRunningTime="2025-12-01 18:51:32.907757204 +0000 UTC m=+1306.929386463" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.926610 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-slbss" podStartSLOduration=4.713543563 podStartE2EDuration="46.926591988s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.624986752 +0000 UTC m=+1262.646616011" lastFinishedPulling="2025-12-01 18:51:30.838035187 +0000 UTC m=+1304.859664436" observedRunningTime="2025-12-01 18:51:32.921394464 +0000 UTC m=+1306.943023713" watchObservedRunningTime="2025-12-01 18:51:32.926591988 +0000 UTC m=+1306.948221247" Dec 01 18:51:32 crc kubenswrapper[4935]: I1201 18:51:32.975587 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-7445b68fd8-4tjzb" podStartSLOduration=4.87730766 podStartE2EDuration="46.975564797s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.625063095 +0000 UTC m=+1262.646692354" lastFinishedPulling="2025-12-01 18:51:30.723320222 +0000 UTC m=+1304.744949491" observedRunningTime="2025-12-01 18:51:32.962451831 +0000 UTC m=+1306.984081090" watchObservedRunningTime="2025-12-01 18:51:32.975564797 +0000 UTC m=+1306.997194056" Dec 01 18:51:33 crc 
kubenswrapper[4935]: I1201 18:51:33.742614 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" event={"ID":"e553c27c-e8f0-4617-a914-46c8b5cfc33b","Type":"ContainerStarted","Data":"017a562a1af9910f7417d82068a9ffa75fdb3dcc36a4a319bea865cc0d0ade0c"} Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.743816 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.749927 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" event={"ID":"61654c85-dd73-48d3-9931-1ce7095e4f07","Type":"ContainerStarted","Data":"a17e7ce1bafb0f33ef6edcc6ac43f66d3279e17eef39cb8ee1805eae4fa69821"} Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.750223 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.756706 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" event={"ID":"8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6","Type":"ContainerStarted","Data":"9d1e6c756fe83e95f36c4e17b6be600ea5e0a32598e9fc5c5cdf41514569cc6b"} Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.756924 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.766068 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" podStartSLOduration=3.266515414 podStartE2EDuration="47.766049046s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.624554568 +0000 UTC m=+1262.646183827" lastFinishedPulling="2025-12-01 18:51:33.1240882 +0000 UTC m=+1307.145717459" observedRunningTime="2025-12-01 18:51:33.760725308 +0000 UTC m=+1307.782354557" watchObservedRunningTime="2025-12-01 18:51:33.766049046 +0000 UTC m=+1307.787678305" Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.768665 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" event={"ID":"d0336662-89bd-415e-8c22-2b05bf5dbf9f","Type":"ContainerStarted","Data":"5c16ca59294743d291ec067fd797dada771d26a4656234dfd0be1d704e721b3a"} Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.785212 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" podStartSLOduration=4.263823687 podStartE2EDuration="48.785188502s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:50:47.710940233 +0000 UTC m=+1261.732569492" lastFinishedPulling="2025-12-01 18:51:32.232305048 +0000 UTC m=+1306.253934307" observedRunningTime="2025-12-01 18:51:33.777463587 +0000 UTC m=+1307.799092836" watchObservedRunningTime="2025-12-01 18:51:33.785188502 +0000 UTC m=+1307.806817761" Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.799757 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" 
podStartSLOduration=2.905197867 podStartE2EDuration="47.799734641s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.229338859 +0000 UTC m=+1262.250968118" lastFinishedPulling="2025-12-01 18:51:33.123875633 +0000 UTC m=+1307.145504892" observedRunningTime="2025-12-01 18:51:33.796900371 +0000 UTC m=+1307.818529630" watchObservedRunningTime="2025-12-01 18:51:33.799734641 +0000 UTC m=+1307.821363900" Dec 01 18:51:33 crc kubenswrapper[4935]: I1201 18:51:33.833634 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" podStartSLOduration=3.970264981 podStartE2EDuration="48.833603171s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.259612344 +0000 UTC m=+1262.281241603" lastFinishedPulling="2025-12-01 18:51:33.122950534 +0000 UTC m=+1307.144579793" observedRunningTime="2025-12-01 18:51:33.830426741 +0000 UTC m=+1307.852056000" watchObservedRunningTime="2025-12-01 18:51:33.833603171 +0000 UTC m=+1307.855232430" Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.778251 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" event={"ID":"8a1ded04-5c24-467c-a51b-c0cfbe67ba4b","Type":"ContainerStarted","Data":"1f674c049dcc94922da5300b5f2bdb112cf0fa74d7b3722f70a0a706fbe5034e"} Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.779512 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.780138 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" event={"ID":"114bfc93-038f-416e-8a85-f1697387b2e2","Type":"ContainerStarted","Data":"2454ac8936134d326041be3139e68148da6370cfdaa35bc2e656aef60aa31cab"} Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.780345 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.781885 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" event={"ID":"e3aa8650-ce39-4eb2-8cae-eb012347abb6","Type":"ContainerStarted","Data":"fe52f405ddd5792824ef946d16284c564f488498ac18aea3b3fdcebca88fbd1b"} Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.782042 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.783864 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" event={"ID":"90db9fa3-a008-4a95-910d-fd7b92f37dea","Type":"ContainerStarted","Data":"f7adee4eb0ad29bd97a4bf634d59be0d8a127de15cd31d33acc292944e640700"} Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.784364 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.788222 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-82j75" Dec 
01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.806363 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" podStartSLOduration=3.272707025 podStartE2EDuration="49.806335571s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:50:47.016086255 +0000 UTC m=+1261.037715514" lastFinishedPulling="2025-12-01 18:51:33.549714801 +0000 UTC m=+1307.571344060" observedRunningTime="2025-12-01 18:51:34.79619873 +0000 UTC m=+1308.817827999" watchObservedRunningTime="2025-12-01 18:51:34.806335571 +0000 UTC m=+1308.827964860" Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.835919 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" podStartSLOduration=3.840462001 podStartE2EDuration="49.835900135s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:50:47.66135216 +0000 UTC m=+1261.682981419" lastFinishedPulling="2025-12-01 18:51:33.656790294 +0000 UTC m=+1307.678419553" observedRunningTime="2025-12-01 18:51:34.831660842 +0000 UTC m=+1308.853290101" watchObservedRunningTime="2025-12-01 18:51:34.835900135 +0000 UTC m=+1308.857529394" Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.859282 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" podStartSLOduration=3.622108787 podStartE2EDuration="49.859266024s" podCreationTimestamp="2025-12-01 18:50:45 +0000 UTC" firstStartedPulling="2025-12-01 18:50:47.315237578 +0000 UTC m=+1261.336866877" lastFinishedPulling="2025-12-01 18:51:33.552394855 +0000 UTC m=+1307.574024114" observedRunningTime="2025-12-01 18:51:34.853250974 +0000 UTC m=+1308.874880233" watchObservedRunningTime="2025-12-01 18:51:34.859266024 +0000 UTC m=+1308.880895283" Dec 01 18:51:34 crc kubenswrapper[4935]: I1201 18:51:34.878178 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" podStartSLOduration=3.515615027 podStartE2EDuration="48.87812413s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" firstStartedPulling="2025-12-01 18:50:48.187171477 +0000 UTC m=+1262.208800726" lastFinishedPulling="2025-12-01 18:51:33.54968057 +0000 UTC m=+1307.571309829" observedRunningTime="2025-12-01 18:51:34.874999251 +0000 UTC m=+1308.896628530" watchObservedRunningTime="2025-12-01 18:51:34.87812413 +0000 UTC m=+1308.899753389" Dec 01 18:51:35 crc kubenswrapper[4935]: I1201 18:51:35.794043 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" event={"ID":"367ed696-aab2-40a9-bdd8-04b4a5e928d0","Type":"ContainerStarted","Data":"93b4cc39e0c5ed5adc246c1a3a106d227658606fa7d4bc291faf16095ddb1ffe"} Dec 01 18:51:35 crc kubenswrapper[4935]: I1201 18:51:35.795177 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" Dec 01 18:51:35 crc kubenswrapper[4935]: I1201 18:51:35.813429 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6wpv9" podStartSLOduration=3.479267139 podStartE2EDuration="49.813406256s" podCreationTimestamp="2025-12-01 18:50:46 +0000 UTC" 
firstStartedPulling="2025-12-01 18:50:48.624573079 +0000 UTC m=+1262.646202338" lastFinishedPulling="2025-12-01 18:51:34.958712196 +0000 UTC m=+1308.980341455" observedRunningTime="2025-12-01 18:51:35.810587357 +0000 UTC m=+1309.832216626" watchObservedRunningTime="2025-12-01 18:51:35.813406256 +0000 UTC m=+1309.835035525" Dec 01 18:51:37 crc kubenswrapper[4935]: I1201 18:51:37.007081 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kg6tq" Dec 01 18:51:39 crc kubenswrapper[4935]: I1201 18:51:39.113237 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-96bb7f5d4-59p62" Dec 01 18:51:42 crc kubenswrapper[4935]: I1201 18:51:42.219583 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-k9kcp" Dec 01 18:51:42 crc kubenswrapper[4935]: I1201 18:51:42.896305 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw" Dec 01 18:51:46 crc kubenswrapper[4935]: I1201 18:51:46.143674 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-w2v9w" Dec 01 18:51:46 crc kubenswrapper[4935]: I1201 18:51:46.165832 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7hmvv" Dec 01 18:51:46 crc kubenswrapper[4935]: I1201 18:51:46.212422 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-j7bwb" Dec 01 18:51:46 crc kubenswrapper[4935]: I1201 18:51:46.261265 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-zx4xc" Dec 01 18:51:46 crc kubenswrapper[4935]: I1201 18:51:46.403177 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-vk8hm" Dec 01 18:51:46 crc kubenswrapper[4935]: I1201 18:51:46.623705 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vtzkd" Dec 01 18:51:46 crc kubenswrapper[4935]: I1201 18:51:46.730936 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8xpwk" Dec 01 18:51:46 crc kubenswrapper[4935]: I1201 18:51:46.794995 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-25tbb" Dec 01 18:51:47 crc kubenswrapper[4935]: I1201 18:51:47.144245 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-mhqkw" Dec 01 18:51:54 crc kubenswrapper[4935]: I1201 18:51:54.346397 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:51:54 crc kubenswrapper[4935]: I1201 18:51:54.346902 
4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.653251 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6x9g"] Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.655716 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.658041 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-xq89w" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.658399 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.658617 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.659372 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.666869 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6x9g"] Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.719671 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5496531a-5f85-4e1e-907b-c25fe65db26f-config\") pod \"dnsmasq-dns-675f4bcbfc-k6x9g\" (UID: \"5496531a-5f85-4e1e-907b-c25fe65db26f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.719747 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94thk\" (UniqueName: \"kubernetes.io/projected/5496531a-5f85-4e1e-907b-c25fe65db26f-kube-api-access-94thk\") pod \"dnsmasq-dns-675f4bcbfc-k6x9g\" (UID: \"5496531a-5f85-4e1e-907b-c25fe65db26f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.740318 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jqt6x"] Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.741724 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.743831 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.760760 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jqt6x"] Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.821741 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94thk\" (UniqueName: \"kubernetes.io/projected/5496531a-5f85-4e1e-907b-c25fe65db26f-kube-api-access-94thk\") pod \"dnsmasq-dns-675f4bcbfc-k6x9g\" (UID: \"5496531a-5f85-4e1e-907b-c25fe65db26f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.821872 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-jqt6x\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.821903 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hrlk\" (UniqueName: \"kubernetes.io/projected/8083a542-a1a0-47d2-b940-c8303d540615-kube-api-access-4hrlk\") pod \"dnsmasq-dns-78dd6ddcc-jqt6x\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.821958 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5496531a-5f85-4e1e-907b-c25fe65db26f-config\") pod \"dnsmasq-dns-675f4bcbfc-k6x9g\" (UID: \"5496531a-5f85-4e1e-907b-c25fe65db26f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.822029 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-config\") pod \"dnsmasq-dns-78dd6ddcc-jqt6x\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.822756 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5496531a-5f85-4e1e-907b-c25fe65db26f-config\") pod \"dnsmasq-dns-675f4bcbfc-k6x9g\" (UID: \"5496531a-5f85-4e1e-907b-c25fe65db26f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.841100 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94thk\" (UniqueName: \"kubernetes.io/projected/5496531a-5f85-4e1e-907b-c25fe65db26f-kube-api-access-94thk\") pod \"dnsmasq-dns-675f4bcbfc-k6x9g\" (UID: \"5496531a-5f85-4e1e-907b-c25fe65db26f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.924187 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-jqt6x\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 
18:52:01.924241 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hrlk\" (UniqueName: \"kubernetes.io/projected/8083a542-a1a0-47d2-b940-c8303d540615-kube-api-access-4hrlk\") pod \"dnsmasq-dns-78dd6ddcc-jqt6x\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.924625 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-config\") pod \"dnsmasq-dns-78dd6ddcc-jqt6x\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.925127 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-jqt6x\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.925354 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-config\") pod \"dnsmasq-dns-78dd6ddcc-jqt6x\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.939488 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hrlk\" (UniqueName: \"kubernetes.io/projected/8083a542-a1a0-47d2-b940-c8303d540615-kube-api-access-4hrlk\") pod \"dnsmasq-dns-78dd6ddcc-jqt6x\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:01 crc kubenswrapper[4935]: I1201 18:52:01.973297 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:02 crc kubenswrapper[4935]: I1201 18:52:02.056078 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:02 crc kubenswrapper[4935]: I1201 18:52:02.495152 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6x9g"] Dec 01 18:52:02 crc kubenswrapper[4935]: I1201 18:52:02.666359 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jqt6x"] Dec 01 18:52:03 crc kubenswrapper[4935]: I1201 18:52:03.073384 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" event={"ID":"5496531a-5f85-4e1e-907b-c25fe65db26f","Type":"ContainerStarted","Data":"8333d0b1554bad64ae60eeaed9582731ac1abacbcdead13623ccdb335bb2e169"} Dec 01 18:52:03 crc kubenswrapper[4935]: I1201 18:52:03.076900 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" event={"ID":"8083a542-a1a0-47d2-b940-c8303d540615","Type":"ContainerStarted","Data":"2d1693b5fb26163cca92077d8025688946bc2e50a7eb76d3a845283c5dd840a5"} Dec 01 18:52:04 crc kubenswrapper[4935]: I1201 18:52:04.891027 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6x9g"] Dec 01 18:52:04 crc kubenswrapper[4935]: I1201 18:52:04.943871 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cxptx"] Dec 01 18:52:04 crc kubenswrapper[4935]: I1201 18:52:04.946345 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:04 crc kubenswrapper[4935]: I1201 18:52:04.951952 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cxptx"] Dec 01 18:52:04 crc kubenswrapper[4935]: I1201 18:52:04.993375 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkcz9\" (UniqueName: \"kubernetes.io/projected/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-kube-api-access-bkcz9\") pod \"dnsmasq-dns-666b6646f7-cxptx\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:04 crc kubenswrapper[4935]: I1201 18:52:04.993509 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-dns-svc\") pod \"dnsmasq-dns-666b6646f7-cxptx\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:04 crc kubenswrapper[4935]: I1201 18:52:04.993548 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-config\") pod \"dnsmasq-dns-666b6646f7-cxptx\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.099322 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkcz9\" (UniqueName: \"kubernetes.io/projected/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-kube-api-access-bkcz9\") pod \"dnsmasq-dns-666b6646f7-cxptx\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.099443 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-dns-svc\") 
pod \"dnsmasq-dns-666b6646f7-cxptx\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.099482 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-config\") pod \"dnsmasq-dns-666b6646f7-cxptx\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.100445 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-config\") pod \"dnsmasq-dns-666b6646f7-cxptx\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.100486 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-dns-svc\") pod \"dnsmasq-dns-666b6646f7-cxptx\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.120910 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkcz9\" (UniqueName: \"kubernetes.io/projected/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-kube-api-access-bkcz9\") pod \"dnsmasq-dns-666b6646f7-cxptx\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.239465 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jqt6x"] Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.261523 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xdzvr"] Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.263014 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.274695 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xdzvr"] Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.296354 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.302753 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-xdzvr\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.302922 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28hg5\" (UniqueName: \"kubernetes.io/projected/d028f398-7da7-4877-a954-6322e304e369-kube-api-access-28hg5\") pod \"dnsmasq-dns-57d769cc4f-xdzvr\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.302967 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-config\") pod \"dnsmasq-dns-57d769cc4f-xdzvr\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.406199 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28hg5\" (UniqueName: \"kubernetes.io/projected/d028f398-7da7-4877-a954-6322e304e369-kube-api-access-28hg5\") pod \"dnsmasq-dns-57d769cc4f-xdzvr\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.406310 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-config\") pod \"dnsmasq-dns-57d769cc4f-xdzvr\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.406352 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-xdzvr\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.620319 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-config\") pod \"dnsmasq-dns-57d769cc4f-xdzvr\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.620433 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-xdzvr\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.623663 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28hg5\" (UniqueName: \"kubernetes.io/projected/d028f398-7da7-4877-a954-6322e304e369-kube-api-access-28hg5\") pod \"dnsmasq-dns-57d769cc4f-xdzvr\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:05 crc kubenswrapper[4935]: I1201 18:52:05.887821 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.057568 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cxptx"] Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.080470 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.082195 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.091085 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.091249 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.091330 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.091447 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.091469 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.091608 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.091710 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.092064 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-lkt6v" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221321 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8bd64079-678d-43de-aeb6-6818338d5997-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221419 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221472 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221498 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " 
pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221568 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221594 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpmjh\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-kube-api-access-gpmjh\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221627 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-config-data\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221651 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221674 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221755 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8bd64079-678d-43de-aeb6-6818338d5997-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.221781 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.323644 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8bd64079-678d-43de-aeb6-6818338d5997-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.323684 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 
18:52:06.323732 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8bd64079-678d-43de-aeb6-6818338d5997-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.323755 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.323793 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.323884 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.323936 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.324346 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpmjh\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-kube-api-access-gpmjh\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.324392 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-config-data\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.324413 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.324806 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.325125 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-config-data\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " 
pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.325212 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.325431 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.325612 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.326051 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.326086 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.329339 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8bd64079-678d-43de-aeb6-6818338d5997-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.329941 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.334837 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.338330 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8bd64079-678d-43de-aeb6-6818338d5997-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.339021 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpmjh\" (UniqueName: 
\"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-kube-api-access-gpmjh\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.358488 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.406716 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.408078 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.409456 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.411807 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.412006 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.412066 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.412066 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-brpbv" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.412162 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.412193 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.412234 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438075 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fce93449-11d7-490f-9456-8f8667b9cb6d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438122 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438197 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438229 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438252 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438293 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438313 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438331 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438349 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438393 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7gpf\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-kube-api-access-m7gpf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.438419 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fce93449-11d7-490f-9456-8f8667b9cb6d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.453030 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540179 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540242 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540274 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540316 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540338 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540354 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540370 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540400 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7gpf\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-kube-api-access-m7gpf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540418 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fce93449-11d7-490f-9456-8f8667b9cb6d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540491 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fce93449-11d7-490f-9456-8f8667b9cb6d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540508 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.540978 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.541337 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.541429 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.541758 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.541864 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.541984 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.545598 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.550577 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fce93449-11d7-490f-9456-8f8667b9cb6d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.552523 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.553520 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fce93449-11d7-490f-9456-8f8667b9cb6d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.578144 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7gpf\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-kube-api-access-m7gpf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.581050 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:06 crc kubenswrapper[4935]: I1201 18:52:06.787649 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.860964 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.862456 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.869849 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.870530 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-v8b9g" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.871359 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.871667 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.880683 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.902732 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.974260 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a20a342c-d5f0-4a57-b485-5e8a122a6034-config-data-generated\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.974317 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a20a342c-d5f0-4a57-b485-5e8a122a6034-config-data-default\") pod \"openstack-galera-0\" (UID: 
\"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.974341 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a20a342c-d5f0-4a57-b485-5e8a122a6034-kolla-config\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.974479 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48xvh\" (UniqueName: \"kubernetes.io/projected/a20a342c-d5f0-4a57-b485-5e8a122a6034-kube-api-access-48xvh\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.974530 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a20a342c-d5f0-4a57-b485-5e8a122a6034-operator-scripts\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.974660 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20a342c-d5f0-4a57-b485-5e8a122a6034-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.974797 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:07 crc kubenswrapper[4935]: I1201 18:52:07.974943 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20a342c-d5f0-4a57-b485-5e8a122a6034-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.076935 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48xvh\" (UniqueName: \"kubernetes.io/projected/a20a342c-d5f0-4a57-b485-5e8a122a6034-kube-api-access-48xvh\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.076988 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a20a342c-d5f0-4a57-b485-5e8a122a6034-operator-scripts\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.077053 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20a342c-d5f0-4a57-b485-5e8a122a6034-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc 
kubenswrapper[4935]: I1201 18:52:08.077124 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.077298 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20a342c-d5f0-4a57-b485-5e8a122a6034-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.077331 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a20a342c-d5f0-4a57-b485-5e8a122a6034-config-data-generated\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.077369 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a20a342c-d5f0-4a57-b485-5e8a122a6034-config-data-default\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.077397 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a20a342c-d5f0-4a57-b485-5e8a122a6034-kolla-config\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.077449 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.077858 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a20a342c-d5f0-4a57-b485-5e8a122a6034-config-data-generated\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.078499 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a20a342c-d5f0-4a57-b485-5e8a122a6034-kolla-config\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.078583 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a20a342c-d5f0-4a57-b485-5e8a122a6034-config-data-default\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.078727 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a20a342c-d5f0-4a57-b485-5e8a122a6034-operator-scripts\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.084081 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20a342c-d5f0-4a57-b485-5e8a122a6034-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.085903 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20a342c-d5f0-4a57-b485-5e8a122a6034-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.094880 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48xvh\" (UniqueName: \"kubernetes.io/projected/a20a342c-d5f0-4a57-b485-5e8a122a6034-kube-api-access-48xvh\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.113725 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"a20a342c-d5f0-4a57-b485-5e8a122a6034\") " pod="openstack/openstack-galera-0" Dec 01 18:52:08 crc kubenswrapper[4935]: I1201 18:52:08.207241 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.278521 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.280801 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.285887 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.285944 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.286067 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-xb6r7" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.286123 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.306747 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.428214 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88d842df-da24-4955-aae0-e6125a01ed0b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.428301 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.428460 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88d842df-da24-4955-aae0-e6125a01ed0b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.428545 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/88d842df-da24-4955-aae0-e6125a01ed0b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.428570 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qts82\" (UniqueName: \"kubernetes.io/projected/88d842df-da24-4955-aae0-e6125a01ed0b-kube-api-access-qts82\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.428601 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/88d842df-da24-4955-aae0-e6125a01ed0b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.428660 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/88d842df-da24-4955-aae0-e6125a01ed0b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.428891 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/88d842df-da24-4955-aae0-e6125a01ed0b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.530826 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88d842df-da24-4955-aae0-e6125a01ed0b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.531057 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.531110 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88d842df-da24-4955-aae0-e6125a01ed0b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.531141 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/88d842df-da24-4955-aae0-e6125a01ed0b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.531183 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qts82\" (UniqueName: \"kubernetes.io/projected/88d842df-da24-4955-aae0-e6125a01ed0b-kube-api-access-qts82\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.531213 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/88d842df-da24-4955-aae0-e6125a01ed0b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.531249 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/88d842df-da24-4955-aae0-e6125a01ed0b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.531311 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/88d842df-da24-4955-aae0-e6125a01ed0b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.532056 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/88d842df-da24-4955-aae0-e6125a01ed0b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.532372 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.532726 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/88d842df-da24-4955-aae0-e6125a01ed0b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.533548 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88d842df-da24-4955-aae0-e6125a01ed0b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.535669 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/88d842df-da24-4955-aae0-e6125a01ed0b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.542333 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88d842df-da24-4955-aae0-e6125a01ed0b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.549902 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.551212 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.559645 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.559922 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-h62lg" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.560041 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.560631 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/88d842df-da24-4955-aae0-e6125a01ed0b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.568359 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qts82\" (UniqueName: \"kubernetes.io/projected/88d842df-da24-4955-aae0-e6125a01ed0b-kube-api-access-qts82\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.584237 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"88d842df-da24-4955-aae0-e6125a01ed0b\") " pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.588687 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.609429 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.635734 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ad10b89-b196-46ba-8b53-a10f8b2a5310-config-data\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.635841 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q29g8\" (UniqueName: \"kubernetes.io/projected/6ad10b89-b196-46ba-8b53-a10f8b2a5310-kube-api-access-q29g8\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.635873 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ad10b89-b196-46ba-8b53-a10f8b2a5310-memcached-tls-certs\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.635902 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad10b89-b196-46ba-8b53-a10f8b2a5310-combined-ca-bundle\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.636006 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6ad10b89-b196-46ba-8b53-a10f8b2a5310-kolla-config\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.737314 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6ad10b89-b196-46ba-8b53-a10f8b2a5310-kolla-config\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.737393 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ad10b89-b196-46ba-8b53-a10f8b2a5310-config-data\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.737457 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q29g8\" (UniqueName: \"kubernetes.io/projected/6ad10b89-b196-46ba-8b53-a10f8b2a5310-kube-api-access-q29g8\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.737481 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ad10b89-b196-46ba-8b53-a10f8b2a5310-memcached-tls-certs\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.737504 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/6ad10b89-b196-46ba-8b53-a10f8b2a5310-combined-ca-bundle\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.738120 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6ad10b89-b196-46ba-8b53-a10f8b2a5310-kolla-config\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.738842 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ad10b89-b196-46ba-8b53-a10f8b2a5310-config-data\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.742507 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad10b89-b196-46ba-8b53-a10f8b2a5310-combined-ca-bundle\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.743343 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ad10b89-b196-46ba-8b53-a10f8b2a5310-memcached-tls-certs\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.763927 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q29g8\" (UniqueName: \"kubernetes.io/projected/6ad10b89-b196-46ba-8b53-a10f8b2a5310-kube-api-access-q29g8\") pod \"memcached-0\" (UID: \"6ad10b89-b196-46ba-8b53-a10f8b2a5310\") " pod="openstack/memcached-0" Dec 01 18:52:09 crc kubenswrapper[4935]: I1201 18:52:09.968991 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 01 18:52:11 crc kubenswrapper[4935]: I1201 18:52:11.263284 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" event={"ID":"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7","Type":"ContainerStarted","Data":"f0739a387b1cb23b0b5728c6a2c3b078ed3080e1ec2140e89e9e76ea79b59d07"} Dec 01 18:52:11 crc kubenswrapper[4935]: I1201 18:52:11.564887 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 18:52:11 crc kubenswrapper[4935]: I1201 18:52:11.566279 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 18:52:11 crc kubenswrapper[4935]: I1201 18:52:11.572764 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-fctj6" Dec 01 18:52:11 crc kubenswrapper[4935]: I1201 18:52:11.579950 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 18:52:11 crc kubenswrapper[4935]: I1201 18:52:11.678673 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2jg2\" (UniqueName: \"kubernetes.io/projected/c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd-kube-api-access-v2jg2\") pod \"kube-state-metrics-0\" (UID: \"c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd\") " pod="openstack/kube-state-metrics-0" Dec 01 18:52:11 crc kubenswrapper[4935]: I1201 18:52:11.782773 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2jg2\" (UniqueName: \"kubernetes.io/projected/c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd-kube-api-access-v2jg2\") pod \"kube-state-metrics-0\" (UID: \"c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd\") " pod="openstack/kube-state-metrics-0" Dec 01 18:52:11 crc kubenswrapper[4935]: I1201 18:52:11.816097 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2jg2\" (UniqueName: \"kubernetes.io/projected/c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd-kube-api-access-v2jg2\") pod \"kube-state-metrics-0\" (UID: \"c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd\") " pod="openstack/kube-state-metrics-0" Dec 01 18:52:11 crc kubenswrapper[4935]: I1201 18:52:11.911479 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.204657 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg"] Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.205901 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.211677 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-ui-dashboards" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.211948 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-ui-dashboards-sa-dockercfg-f6hnc" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.216755 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg"] Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.291225 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/202825a3-ae0e-443f-ac33-3ce527a1bbd3-serving-cert\") pod \"observability-ui-dashboards-7d5fb4cbfb-mt5kg\" (UID: \"202825a3-ae0e-443f-ac33-3ce527a1bbd3\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.291342 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qs66\" (UniqueName: \"kubernetes.io/projected/202825a3-ae0e-443f-ac33-3ce527a1bbd3-kube-api-access-8qs66\") pod \"observability-ui-dashboards-7d5fb4cbfb-mt5kg\" (UID: \"202825a3-ae0e-443f-ac33-3ce527a1bbd3\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.393492 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qs66\" (UniqueName: \"kubernetes.io/projected/202825a3-ae0e-443f-ac33-3ce527a1bbd3-kube-api-access-8qs66\") pod \"observability-ui-dashboards-7d5fb4cbfb-mt5kg\" (UID: \"202825a3-ae0e-443f-ac33-3ce527a1bbd3\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.393961 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/202825a3-ae0e-443f-ac33-3ce527a1bbd3-serving-cert\") pod \"observability-ui-dashboards-7d5fb4cbfb-mt5kg\" (UID: \"202825a3-ae0e-443f-ac33-3ce527a1bbd3\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" Dec 01 18:52:12 crc kubenswrapper[4935]: E1201 18:52:12.394128 4935 secret.go:188] Couldn't get secret openshift-operators/observability-ui-dashboards: secret "observability-ui-dashboards" not found Dec 01 18:52:12 crc kubenswrapper[4935]: E1201 18:52:12.394206 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/202825a3-ae0e-443f-ac33-3ce527a1bbd3-serving-cert podName:202825a3-ae0e-443f-ac33-3ce527a1bbd3 nodeName:}" failed. No retries permitted until 2025-12-01 18:52:12.89418369 +0000 UTC m=+1346.915812949 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/202825a3-ae0e-443f-ac33-3ce527a1bbd3-serving-cert") pod "observability-ui-dashboards-7d5fb4cbfb-mt5kg" (UID: "202825a3-ae0e-443f-ac33-3ce527a1bbd3") : secret "observability-ui-dashboards" not found Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.454362 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qs66\" (UniqueName: \"kubernetes.io/projected/202825a3-ae0e-443f-ac33-3ce527a1bbd3-kube-api-access-8qs66\") pod \"observability-ui-dashboards-7d5fb4cbfb-mt5kg\" (UID: \"202825a3-ae0e-443f-ac33-3ce527a1bbd3\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.737748 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-bf44fd795-7hbd5"] Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.739069 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.765405 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-bf44fd795-7hbd5"] Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.878613 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.880736 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.883609 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.883810 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.885481 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.885699 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.886183 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-j26z8" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.892130 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.897074 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.902299 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-service-ca\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.902360 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-trusted-ca-bundle\") pod 
\"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.902421 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djfpf\" (UniqueName: \"kubernetes.io/projected/bde63c92-0762-40a7-87a3-d2c104074025-kube-api-access-djfpf\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.902462 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-console-config\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.902506 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bde63c92-0762-40a7-87a3-d2c104074025-console-serving-cert\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.902534 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/202825a3-ae0e-443f-ac33-3ce527a1bbd3-serving-cert\") pod \"observability-ui-dashboards-7d5fb4cbfb-mt5kg\" (UID: \"202825a3-ae0e-443f-ac33-3ce527a1bbd3\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.902584 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bde63c92-0762-40a7-87a3-d2c104074025-console-oauth-config\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.902599 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-oauth-serving-cert\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:12 crc kubenswrapper[4935]: I1201 18:52:12.910745 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/202825a3-ae0e-443f-ac33-3ce527a1bbd3-serving-cert\") pod \"observability-ui-dashboards-7d5fb4cbfb-mt5kg\" (UID: \"202825a3-ae0e-443f-ac33-3ce527a1bbd3\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.003961 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-trusted-ca-bundle\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004026 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004052 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djfpf\" (UniqueName: \"kubernetes.io/projected/bde63c92-0762-40a7-87a3-d2c104074025-kube-api-access-djfpf\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004073 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004099 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004129 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-console-config\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004191 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004217 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bde63c92-0762-40a7-87a3-d2c104074025-console-serving-cert\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004250 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7d89\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-kube-api-access-g7d89\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004270 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2046ae6b-b1cd-421d-a4b0-686e1e29c407-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " 
pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004296 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bde63c92-0762-40a7-87a3-d2c104074025-console-oauth-config\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004311 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-oauth-serving-cert\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004327 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004350 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.004387 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-service-ca\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.005122 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-service-ca\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.005429 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-trusted-ca-bundle\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.006002 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-oauth-serving-cert\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.006851 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bde63c92-0762-40a7-87a3-d2c104074025-console-config\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 
18:52:13.008566 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bde63c92-0762-40a7-87a3-d2c104074025-console-oauth-config\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.009215 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bde63c92-0762-40a7-87a3-d2c104074025-console-serving-cert\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.033285 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djfpf\" (UniqueName: \"kubernetes.io/projected/bde63c92-0762-40a7-87a3-d2c104074025-kube-api-access-djfpf\") pod \"console-bf44fd795-7hbd5\" (UID: \"bde63c92-0762-40a7-87a3-d2c104074025\") " pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.064268 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.106777 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.107108 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7d89\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-kube-api-access-g7d89\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.107563 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2046ae6b-b1cd-421d-a4b0-686e1e29c407-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.108557 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.108700 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.109238 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-tls-assets\") pod 
\"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.109069 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.108484 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2046ae6b-b1cd-421d-a4b0-686e1e29c407-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.110863 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.111104 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.114562 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.116084 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.116418 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.122367 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.123244 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7d89\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-kube-api-access-g7d89\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.131065 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.144733 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.149439 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"prometheus-metric-storage-0\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:13 crc kubenswrapper[4935]: I1201 18:52:13.209982 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.337754 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.340364 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.346572 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.346660 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-72phx" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.346962 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.347220 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.347612 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.366256 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.488020 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-config\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.488290 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.488417 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.488724 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.489579 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.489726 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.489997 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.490265 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc8fc\" (UniqueName: \"kubernetes.io/projected/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-kube-api-access-wc8fc\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.593176 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.593283 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.593347 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc8fc\" (UniqueName: \"kubernetes.io/projected/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-kube-api-access-wc8fc\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.593425 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-config\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.593490 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.593523 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.593562 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.593590 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 
18:52:16.594124 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.594202 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.594335 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.594750 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-config\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.600635 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.601213 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.601444 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.610750 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc8fc\" (UniqueName: \"kubernetes.io/projected/ead52c85-6fd1-4ba0-9d5d-09955ce5b967-kube-api-access-wc8fc\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.631935 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"ead52c85-6fd1-4ba0-9d5d-09955ce5b967\") " pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:16 crc kubenswrapper[4935]: I1201 18:52:16.664749 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.526033 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-7twr7"] Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.527966 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.532756 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-pc2qv" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.535258 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.535563 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.559577 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7twr7"] Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.600893 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-zxbb9"] Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.604045 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.626907 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3798fbe5-306b-43f9-8f1f-ddc928996f88-ovn-controller-tls-certs\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.627075 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3798fbe5-306b-43f9-8f1f-ddc928996f88-var-log-ovn\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.627620 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3798fbe5-306b-43f9-8f1f-ddc928996f88-combined-ca-bundle\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.628516 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3798fbe5-306b-43f9-8f1f-ddc928996f88-scripts\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.628579 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzvqh\" (UniqueName: \"kubernetes.io/projected/3798fbe5-306b-43f9-8f1f-ddc928996f88-kube-api-access-dzvqh\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.628645 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"var-run\" (UniqueName: \"kubernetes.io/host-path/3798fbe5-306b-43f9-8f1f-ddc928996f88-var-run\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.628715 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3798fbe5-306b-43f9-8f1f-ddc928996f88-var-run-ovn\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.632177 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-zxbb9"] Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730476 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzvqh\" (UniqueName: \"kubernetes.io/projected/3798fbe5-306b-43f9-8f1f-ddc928996f88-kube-api-access-dzvqh\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730555 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3798fbe5-306b-43f9-8f1f-ddc928996f88-var-run\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730607 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-var-lib\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730650 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3798fbe5-306b-43f9-8f1f-ddc928996f88-var-run-ovn\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730735 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3798fbe5-306b-43f9-8f1f-ddc928996f88-ovn-controller-tls-certs\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730757 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-etc-ovs\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730808 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3798fbe5-306b-43f9-8f1f-ddc928996f88-var-log-ovn\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730831 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-var-log\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730857 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3798fbe5-306b-43f9-8f1f-ddc928996f88-combined-ca-bundle\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730915 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f188ef33-496a-425c-87a1-54d67d7b42b5-scripts\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730945 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwbm8\" (UniqueName: \"kubernetes.io/projected/f188ef33-496a-425c-87a1-54d67d7b42b5-kube-api-access-rwbm8\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.730990 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-var-run\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.731015 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3798fbe5-306b-43f9-8f1f-ddc928996f88-scripts\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.731792 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3798fbe5-306b-43f9-8f1f-ddc928996f88-var-run\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.731893 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3798fbe5-306b-43f9-8f1f-ddc928996f88-var-log-ovn\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.731923 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3798fbe5-306b-43f9-8f1f-ddc928996f88-var-run-ovn\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.733496 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3798fbe5-306b-43f9-8f1f-ddc928996f88-scripts\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " 
pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.735413 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3798fbe5-306b-43f9-8f1f-ddc928996f88-ovn-controller-tls-certs\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.750137 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3798fbe5-306b-43f9-8f1f-ddc928996f88-combined-ca-bundle\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.755107 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzvqh\" (UniqueName: \"kubernetes.io/projected/3798fbe5-306b-43f9-8f1f-ddc928996f88-kube-api-access-dzvqh\") pod \"ovn-controller-7twr7\" (UID: \"3798fbe5-306b-43f9-8f1f-ddc928996f88\") " pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.833630 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-var-lib\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.833749 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-etc-ovs\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.833793 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-var-log\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.833817 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-var-lib\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.833854 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f188ef33-496a-425c-87a1-54d67d7b42b5-scripts\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.833884 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwbm8\" (UniqueName: \"kubernetes.io/projected/f188ef33-496a-425c-87a1-54d67d7b42b5-kube-api-access-rwbm8\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.833918 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" 
(UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-var-run\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.833921 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-var-log\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.834031 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-var-run\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.834133 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f188ef33-496a-425c-87a1-54d67d7b42b5-etc-ovs\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.835629 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f188ef33-496a-425c-87a1-54d67d7b42b5-scripts\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.852261 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwbm8\" (UniqueName: \"kubernetes.io/projected/f188ef33-496a-425c-87a1-54d67d7b42b5-kube-api-access-rwbm8\") pod \"ovn-controller-ovs-zxbb9\" (UID: \"f188ef33-496a-425c-87a1-54d67d7b42b5\") " pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.859518 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7twr7" Dec 01 18:52:17 crc kubenswrapper[4935]: I1201 18:52:17.928688 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.439929 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.441951 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.450684 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.450932 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.451281 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.451472 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-cgkjs" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.463108 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.556306 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/096839ae-3e36-4242-bfbd-e19bf1ada9f2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.556594 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/096839ae-3e36-4242-bfbd-e19bf1ada9f2-config\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.556638 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/096839ae-3e36-4242-bfbd-e19bf1ada9f2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.556702 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/096839ae-3e36-4242-bfbd-e19bf1ada9f2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.556750 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.556771 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/096839ae-3e36-4242-bfbd-e19bf1ada9f2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.556789 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/096839ae-3e36-4242-bfbd-e19bf1ada9f2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " 
pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.556836 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh6mb\" (UniqueName: \"kubernetes.io/projected/096839ae-3e36-4242-bfbd-e19bf1ada9f2-kube-api-access-kh6mb\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.658475 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/096839ae-3e36-4242-bfbd-e19bf1ada9f2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.658570 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/096839ae-3e36-4242-bfbd-e19bf1ada9f2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.658629 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.658646 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/096839ae-3e36-4242-bfbd-e19bf1ada9f2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.658664 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/096839ae-3e36-4242-bfbd-e19bf1ada9f2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.658707 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh6mb\" (UniqueName: \"kubernetes.io/projected/096839ae-3e36-4242-bfbd-e19bf1ada9f2-kube-api-access-kh6mb\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.658754 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/096839ae-3e36-4242-bfbd-e19bf1ada9f2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.658852 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/096839ae-3e36-4242-bfbd-e19bf1ada9f2-config\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.660721 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/096839ae-3e36-4242-bfbd-e19bf1ada9f2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.661045 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/096839ae-3e36-4242-bfbd-e19bf1ada9f2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.661859 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.661926 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/096839ae-3e36-4242-bfbd-e19bf1ada9f2-config\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.667539 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/096839ae-3e36-4242-bfbd-e19bf1ada9f2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.668081 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/096839ae-3e36-4242-bfbd-e19bf1ada9f2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.668112 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/096839ae-3e36-4242-bfbd-e19bf1ada9f2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.676196 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh6mb\" (UniqueName: \"kubernetes.io/projected/096839ae-3e36-4242-bfbd-e19bf1ada9f2-kube-api-access-kh6mb\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.691578 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"096839ae-3e36-4242-bfbd-e19bf1ada9f2\") " pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:18 crc kubenswrapper[4935]: I1201 18:52:18.863450 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:21 crc kubenswrapper[4935]: I1201 18:52:21.608730 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 18:52:22 crc kubenswrapper[4935]: E1201 18:52:22.210665 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 01 18:52:22 crc kubenswrapper[4935]: E1201 18:52:22.210980 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4hrlk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-jqt6x_openstack(8083a542-a1a0-47d2-b940-c8303d540615): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:52:22 crc kubenswrapper[4935]: E1201 18:52:22.212621 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" podUID="8083a542-a1a0-47d2-b940-c8303d540615" Dec 01 18:52:22 crc kubenswrapper[4935]: W1201 18:52:22.298245 4935 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88d842df_da24_4955_aae0_e6125a01ed0b.slice/crio-70ba2c008c283aca647c0394c282db4c8035cb3f971d13c0a3b75a67f20b5b1e WatchSource:0}: Error finding container 70ba2c008c283aca647c0394c282db4c8035cb3f971d13c0a3b75a67f20b5b1e: Status 404 returned error can't find the container with id 70ba2c008c283aca647c0394c282db4c8035cb3f971d13c0a3b75a67f20b5b1e Dec 01 18:52:22 crc kubenswrapper[4935]: E1201 18:52:22.391173 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 01 18:52:22 crc kubenswrapper[4935]: E1201 18:52:22.391499 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-94thk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-k6x9g_openstack(5496531a-5f85-4e1e-907b-c25fe65db26f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:52:22 crc kubenswrapper[4935]: E1201 18:52:22.396440 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" podUID="5496531a-5f85-4e1e-907b-c25fe65db26f" Dec 01 18:52:22 crc kubenswrapper[4935]: I1201 18:52:22.427279 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"88d842df-da24-4955-aae0-e6125a01ed0b","Type":"ContainerStarted","Data":"70ba2c008c283aca647c0394c282db4c8035cb3f971d13c0a3b75a67f20b5b1e"} Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.071141 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.095989 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.152735 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hrlk\" (UniqueName: \"kubernetes.io/projected/8083a542-a1a0-47d2-b940-c8303d540615-kube-api-access-4hrlk\") pod \"8083a542-a1a0-47d2-b940-c8303d540615\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.152867 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-config\") pod \"8083a542-a1a0-47d2-b940-c8303d540615\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.152914 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-dns-svc\") pod \"8083a542-a1a0-47d2-b940-c8303d540615\" (UID: \"8083a542-a1a0-47d2-b940-c8303d540615\") " Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.152941 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5496531a-5f85-4e1e-907b-c25fe65db26f-config\") pod \"5496531a-5f85-4e1e-907b-c25fe65db26f\" (UID: \"5496531a-5f85-4e1e-907b-c25fe65db26f\") " Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.152958 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94thk\" (UniqueName: \"kubernetes.io/projected/5496531a-5f85-4e1e-907b-c25fe65db26f-kube-api-access-94thk\") pod \"5496531a-5f85-4e1e-907b-c25fe65db26f\" (UID: \"5496531a-5f85-4e1e-907b-c25fe65db26f\") " Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.153674 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5496531a-5f85-4e1e-907b-c25fe65db26f-config" (OuterVolumeSpecName: "config") pod "5496531a-5f85-4e1e-907b-c25fe65db26f" (UID: "5496531a-5f85-4e1e-907b-c25fe65db26f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.154217 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-config" (OuterVolumeSpecName: "config") pod "8083a542-a1a0-47d2-b940-c8303d540615" (UID: "8083a542-a1a0-47d2-b940-c8303d540615"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.154437 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8083a542-a1a0-47d2-b940-c8303d540615" (UID: "8083a542-a1a0-47d2-b940-c8303d540615"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.159472 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5496531a-5f85-4e1e-907b-c25fe65db26f-kube-api-access-94thk" (OuterVolumeSpecName: "kube-api-access-94thk") pod "5496531a-5f85-4e1e-907b-c25fe65db26f" (UID: "5496531a-5f85-4e1e-907b-c25fe65db26f"). InnerVolumeSpecName "kube-api-access-94thk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.161330 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8083a542-a1a0-47d2-b940-c8303d540615-kube-api-access-4hrlk" (OuterVolumeSpecName: "kube-api-access-4hrlk") pod "8083a542-a1a0-47d2-b940-c8303d540615" (UID: "8083a542-a1a0-47d2-b940-c8303d540615"). InnerVolumeSpecName "kube-api-access-4hrlk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.255817 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.255848 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8083a542-a1a0-47d2-b940-c8303d540615-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.255859 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5496531a-5f85-4e1e-907b-c25fe65db26f-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.255868 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94thk\" (UniqueName: \"kubernetes.io/projected/5496531a-5f85-4e1e-907b-c25fe65db26f-kube-api-access-94thk\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.255879 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hrlk\" (UniqueName: \"kubernetes.io/projected/8083a542-a1a0-47d2-b940-c8303d540615-kube-api-access-4hrlk\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.423841 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.431635 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.439667 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" event={"ID":"5496531a-5f85-4e1e-907b-c25fe65db26f","Type":"ContainerDied","Data":"8333d0b1554bad64ae60eeaed9582731ac1abacbcdead13623ccdb335bb2e169"} Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.439798 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k6x9g" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.442653 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" event={"ID":"8083a542-a1a0-47d2-b940-c8303d540615","Type":"ContainerDied","Data":"2d1693b5fb26163cca92077d8025688946bc2e50a7eb76d3a845283c5dd840a5"} Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.442757 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-jqt6x" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.443872 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.447419 4935 generic.go:334] "Generic (PLEG): container finished" podID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" containerID="6dc791d4272e96486f0ab10bc9b131327850e08d946426ce0519b2ebcadcd72d" exitCode=0 Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.447455 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" event={"ID":"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7","Type":"ContainerDied","Data":"6dc791d4272e96486f0ab10bc9b131327850e08d946426ce0519b2ebcadcd72d"} Dec 01 18:52:23 crc kubenswrapper[4935]: W1201 18:52:23.495579 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2046ae6b_b1cd_421d_a4b0_686e1e29c407.slice/crio-6f80d7e561c1d0e53fd52257effb11a96633dbf784fa9ad806eb0dfb0785ca69 WatchSource:0}: Error finding container 6f80d7e561c1d0e53fd52257effb11a96633dbf784fa9ad806eb0dfb0785ca69: Status 404 returned error can't find the container with id 6f80d7e561c1d0e53fd52257effb11a96633dbf784fa9ad806eb0dfb0785ca69 Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.672448 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6x9g"] Dec 01 18:52:23 crc kubenswrapper[4935]: E1201 18:52:23.711949 4935 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 01 18:52:23 crc kubenswrapper[4935]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 01 18:52:23 crc kubenswrapper[4935]: > podSandboxID="f0739a387b1cb23b0b5728c6a2c3b078ed3080e1ec2140e89e9e76ea79b59d07" Dec 01 18:52:23 crc kubenswrapper[4935]: E1201 18:52:23.712285 4935 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 01 18:52:23 crc kubenswrapper[4935]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bkcz9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-cxptx_openstack(6a28f24e-19d3-4eb5-b972-6be6e2e47ed7): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 01 18:52:23 crc kubenswrapper[4935]: > logger="UnhandledError" Dec 01 18:52:23 crc kubenswrapper[4935]: E1201 18:52:23.713720 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" podUID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.726487 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6x9g"] Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.742180 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jqt6x"] Dec 01 18:52:23 crc kubenswrapper[4935]: I1201 18:52:23.748818 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-jqt6x"] Dec 01 18:52:24 
crc kubenswrapper[4935]: I1201 18:52:24.145703 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.203045 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 18:52:24 crc kubenswrapper[4935]: W1201 18:52:24.204988 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfce93449_11d7_490f_9456_8f8667b9cb6d.slice/crio-95b0feb6cbea065497eb5d4b14056c5ac69ebf59a54d666f3349991bafc78904 WatchSource:0}: Error finding container 95b0feb6cbea065497eb5d4b14056c5ac69ebf59a54d666f3349991bafc78904: Status 404 returned error can't find the container with id 95b0feb6cbea065497eb5d4b14056c5ac69ebf59a54d666f3349991bafc78904 Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.213039 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.221889 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg"] Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.232373 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7twr7"] Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.240778 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-bf44fd795-7hbd5"] Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.248264 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xdzvr"] Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.346025 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.346295 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.386475 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 18:52:24 crc kubenswrapper[4935]: W1201 18:52:24.415613 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podead52c85_6fd1_4ba0_9d5d_09955ce5b967.slice/crio-6e486baa7089418478ab0acac46914afdede7ef5f117537ea576d6ee23a91957 WatchSource:0}: Error finding container 6e486baa7089418478ab0acac46914afdede7ef5f117537ea576d6ee23a91957: Status 404 returned error can't find the container with id 6e486baa7089418478ab0acac46914afdede7ef5f117537ea576d6ee23a91957 Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.461315 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"fce93449-11d7-490f-9456-8f8667b9cb6d","Type":"ContainerStarted","Data":"95b0feb6cbea065497eb5d4b14056c5ac69ebf59a54d666f3349991bafc78904"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.463436 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-7twr7" event={"ID":"3798fbe5-306b-43f9-8f1f-ddc928996f88","Type":"ContainerStarted","Data":"c287181064bc6bdb275c5266d49c5613ede1142bc912e6d2a0ed15e9f728f6eb"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.466282 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerStarted","Data":"6f80d7e561c1d0e53fd52257effb11a96633dbf784fa9ad806eb0dfb0785ca69"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.468493 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ead52c85-6fd1-4ba0-9d5d-09955ce5b967","Type":"ContainerStarted","Data":"6e486baa7089418478ab0acac46914afdede7ef5f117537ea576d6ee23a91957"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.469528 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd","Type":"ContainerStarted","Data":"250a592c0089d04b10a9538db998ca84336ce19007ca8a6f2b3e1cdf3ce3f90d"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.470643 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-bf44fd795-7hbd5" event={"ID":"bde63c92-0762-40a7-87a3-d2c104074025","Type":"ContainerStarted","Data":"32a113a79e49728f7b0ec9faccd8bdc5346993bb6ae6216497ed2c07e3ee50aa"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.471927 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"6ad10b89-b196-46ba-8b53-a10f8b2a5310","Type":"ContainerStarted","Data":"aff25f3d2317494be9762bfc329f08764bfba12ea9ed16b1ab9791cb8bf149e2"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.473167 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8bd64079-678d-43de-aeb6-6818338d5997","Type":"ContainerStarted","Data":"d8b01d11c0115a84afa8993de3e409974a4da2ca3769434db6a71c312eeeb9f0"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.474025 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" event={"ID":"202825a3-ae0e-443f-ac33-3ce527a1bbd3","Type":"ContainerStarted","Data":"76fc3cbc6ddd9bde0aee8fca6c9bac24f5e6ea94d64fda9037858de74bfb135d"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.475080 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" event={"ID":"d028f398-7da7-4877-a954-6322e304e369","Type":"ContainerStarted","Data":"cdab3827e5c3b26e7b76f080bb502297717be4be8d298399f4c8d13c9d91937f"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.476927 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"a20a342c-d5f0-4a57-b485-5e8a122a6034","Type":"ContainerStarted","Data":"f6407399f9d339f18af8c89296e9055635b379ad75cf3a35a21e8665b4f58203"} Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.523698 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5496531a-5f85-4e1e-907b-c25fe65db26f" path="/var/lib/kubelet/pods/5496531a-5f85-4e1e-907b-c25fe65db26f/volumes" Dec 01 18:52:24 crc kubenswrapper[4935]: I1201 18:52:24.524095 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8083a542-a1a0-47d2-b940-c8303d540615" path="/var/lib/kubelet/pods/8083a542-a1a0-47d2-b940-c8303d540615/volumes" Dec 01 18:52:25 crc kubenswrapper[4935]: I1201 
18:52:25.056051 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-zxbb9"] Dec 01 18:52:25 crc kubenswrapper[4935]: I1201 18:52:25.424442 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 18:52:25 crc kubenswrapper[4935]: I1201 18:52:25.489033 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" event={"ID":"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7","Type":"ContainerStarted","Data":"56ef4276a6505642e9b01426afc20df59347466c2226b473c3c7e506316d71ec"} Dec 01 18:52:25 crc kubenswrapper[4935]: I1201 18:52:25.489248 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:25 crc kubenswrapper[4935]: I1201 18:52:25.494450 4935 generic.go:334] "Generic (PLEG): container finished" podID="d028f398-7da7-4877-a954-6322e304e369" containerID="cdd47d7d3507285a9802d9145d260cc1b51ad07eb7cc6bc00073fda17ab2c6e2" exitCode=0 Dec 01 18:52:25 crc kubenswrapper[4935]: I1201 18:52:25.494531 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" event={"ID":"d028f398-7da7-4877-a954-6322e304e369","Type":"ContainerDied","Data":"cdd47d7d3507285a9802d9145d260cc1b51ad07eb7cc6bc00073fda17ab2c6e2"} Dec 01 18:52:25 crc kubenswrapper[4935]: I1201 18:52:25.498099 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-bf44fd795-7hbd5" event={"ID":"bde63c92-0762-40a7-87a3-d2c104074025","Type":"ContainerStarted","Data":"280159a989b3a3196a751cd6b705798d10acd066615f26ae4d1fa61e0b46d7a0"} Dec 01 18:52:25 crc kubenswrapper[4935]: I1201 18:52:25.510223 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" podStartSLOduration=9.209883536 podStartE2EDuration="21.510202614s" podCreationTimestamp="2025-12-01 18:52:04 +0000 UTC" firstStartedPulling="2025-12-01 18:52:10.269502436 +0000 UTC m=+1344.291131705" lastFinishedPulling="2025-12-01 18:52:22.569821524 +0000 UTC m=+1356.591450783" observedRunningTime="2025-12-01 18:52:25.504992469 +0000 UTC m=+1359.526621748" watchObservedRunningTime="2025-12-01 18:52:25.510202614 +0000 UTC m=+1359.531831893" Dec 01 18:52:25 crc kubenswrapper[4935]: I1201 18:52:25.532691 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-bf44fd795-7hbd5" podStartSLOduration=13.532669393 podStartE2EDuration="13.532669393s" podCreationTimestamp="2025-12-01 18:52:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:52:25.528503342 +0000 UTC m=+1359.550132611" watchObservedRunningTime="2025-12-01 18:52:25.532669393 +0000 UTC m=+1359.554298652" Dec 01 18:52:26 crc kubenswrapper[4935]: I1201 18:52:26.538533 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zxbb9" event={"ID":"f188ef33-496a-425c-87a1-54d67d7b42b5","Type":"ContainerStarted","Data":"4214980ce4c89dbdb38907fdd4fedbd681c515abfceed4776ec0235b53c7efaa"} Dec 01 18:52:27 crc kubenswrapper[4935]: W1201 18:52:27.742515 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod096839ae_3e36_4242_bfbd_e19bf1ada9f2.slice/crio-d1b09dd8ecd5e0a03ea4539e181ff61671a5645ba90994f17ea0166757c9a389 WatchSource:0}: Error finding container 
d1b09dd8ecd5e0a03ea4539e181ff61671a5645ba90994f17ea0166757c9a389: Status 404 returned error can't find the container with id d1b09dd8ecd5e0a03ea4539e181ff61671a5645ba90994f17ea0166757c9a389 Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.198616 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-wh57b"] Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.201453 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.203897 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.211503 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-wh57b"] Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.294766 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-config\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.294816 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7xgd\" (UniqueName: \"kubernetes.io/projected/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-kube-api-access-t7xgd\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.294837 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.294865 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-combined-ca-bundle\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.294928 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-ovn-rundir\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.294949 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-ovs-rundir\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.396348 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/host-path/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-ovn-rundir\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.396414 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-ovs-rundir\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.396529 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-config\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.396564 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7xgd\" (UniqueName: \"kubernetes.io/projected/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-kube-api-access-t7xgd\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.396590 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.396625 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-combined-ca-bundle\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.396803 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-ovn-rundir\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.396982 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-ovs-rundir\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.398993 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-config\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.402995 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-metrics-certs-tls-certs\") pod 
\"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.406880 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-combined-ca-bundle\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.430693 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7xgd\" (UniqueName: \"kubernetes.io/projected/d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7-kube-api-access-t7xgd\") pod \"ovn-controller-metrics-wh57b\" (UID: \"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7\") " pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.437596 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cxptx"] Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.456514 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" podUID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" containerName="dnsmasq-dns" containerID="cri-o://56ef4276a6505642e9b01426afc20df59347466c2226b473c3c7e506316d71ec" gracePeriod=10 Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.465004 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-4c2jr"] Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.467090 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.476270 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.485022 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-4c2jr"] Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.535826 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-wh57b" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.585069 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"096839ae-3e36-4242-bfbd-e19bf1ada9f2","Type":"ContainerStarted","Data":"d1b09dd8ecd5e0a03ea4539e181ff61671a5645ba90994f17ea0166757c9a389"} Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.614168 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-config\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.614336 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttpfk\" (UniqueName: \"kubernetes.io/projected/0ca38f45-895b-4bd4-812c-033deaa9eb45-kube-api-access-ttpfk\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.614403 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.614483 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.701899 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xdzvr"] Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.718030 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.718530 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-config\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.718594 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttpfk\" (UniqueName: \"kubernetes.io/projected/0ca38f45-895b-4bd4-812c-033deaa9eb45-kube-api-access-ttpfk\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.718626 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.718949 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.719391 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.720125 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-config\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.744072 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttpfk\" (UniqueName: \"kubernetes.io/projected/0ca38f45-895b-4bd4-812c-033deaa9eb45-kube-api-access-ttpfk\") pod \"dnsmasq-dns-6bc7876d45-4c2jr\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.765258 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-n7vb6"] Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.766991 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.772597 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.782199 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-n7vb6"] Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.822690 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.822776 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-config\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.822818 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.822868 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5x99n\" (UniqueName: \"kubernetes.io/projected/c216b25c-f2ed-423c-8e80-829af72648ae-kube-api-access-5x99n\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.822922 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-dns-svc\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.856732 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.924972 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-config\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.925040 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.925093 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5x99n\" (UniqueName: \"kubernetes.io/projected/c216b25c-f2ed-423c-8e80-829af72648ae-kube-api-access-5x99n\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.925167 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-dns-svc\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.925212 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.926002 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-config\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.926081 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.926719 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 18:52:28.926834 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-dns-svc\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:28 crc kubenswrapper[4935]: I1201 
18:52:28.940402 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5x99n\" (UniqueName: \"kubernetes.io/projected/c216b25c-f2ed-423c-8e80-829af72648ae-kube-api-access-5x99n\") pod \"dnsmasq-dns-8554648995-n7vb6\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:29 crc kubenswrapper[4935]: I1201 18:52:29.150541 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:29 crc kubenswrapper[4935]: I1201 18:52:29.600114 4935 generic.go:334] "Generic (PLEG): container finished" podID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" containerID="56ef4276a6505642e9b01426afc20df59347466c2226b473c3c7e506316d71ec" exitCode=0 Dec 01 18:52:29 crc kubenswrapper[4935]: I1201 18:52:29.600173 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" event={"ID":"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7","Type":"ContainerDied","Data":"56ef4276a6505642e9b01426afc20df59347466c2226b473c3c7e506316d71ec"} Dec 01 18:52:30 crc kubenswrapper[4935]: I1201 18:52:30.299344 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" podUID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.069412 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.070245 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.077654 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.470276 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.637163 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-config\") pod \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.637247 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-dns-svc\") pod \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.637367 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkcz9\" (UniqueName: \"kubernetes.io/projected/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-kube-api-access-bkcz9\") pod \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\" (UID: \"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7\") " Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.643105 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-kube-api-access-bkcz9" (OuterVolumeSpecName: "kube-api-access-bkcz9") pod "6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" (UID: "6a28f24e-19d3-4eb5-b972-6be6e2e47ed7"). InnerVolumeSpecName "kube-api-access-bkcz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.653137 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" event={"ID":"6a28f24e-19d3-4eb5-b972-6be6e2e47ed7","Type":"ContainerDied","Data":"f0739a387b1cb23b0b5728c6a2c3b078ed3080e1ec2140e89e9e76ea79b59d07"} Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.653213 4935 scope.go:117] "RemoveContainer" containerID="56ef4276a6505642e9b01426afc20df59347466c2226b473c3c7e506316d71ec" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.655776 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-cxptx" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.656736 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-bf44fd795-7hbd5" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.717023 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" (UID: "6a28f24e-19d3-4eb5-b972-6be6e2e47ed7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.730068 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-config" (OuterVolumeSpecName: "config") pod "6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" (UID: "6a28f24e-19d3-4eb5-b972-6be6e2e47ed7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.737923 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-66ff47d4bb-794fx"] Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.741213 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.741239 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:33 crc kubenswrapper[4935]: I1201 18:52:33.741252 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkcz9\" (UniqueName: \"kubernetes.io/projected/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7-kube-api-access-bkcz9\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:34 crc kubenswrapper[4935]: I1201 18:52:34.000086 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cxptx"] Dec 01 18:52:34 crc kubenswrapper[4935]: I1201 18:52:34.009088 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cxptx"] Dec 01 18:52:34 crc kubenswrapper[4935]: I1201 18:52:34.519384 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" path="/var/lib/kubelet/pods/6a28f24e-19d3-4eb5-b972-6be6e2e47ed7/volumes" Dec 01 18:52:40 crc kubenswrapper[4935]: I1201 18:52:40.605094 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-wh57b"] Dec 01 18:52:41 crc kubenswrapper[4935]: I1201 18:52:41.146041 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-4c2jr"] Dec 01 18:52:41 crc kubenswrapper[4935]: I1201 18:52:41.232313 4935 scope.go:117] "RemoveContainer" containerID="6dc791d4272e96486f0ab10bc9b131327850e08d946426ce0519b2ebcadcd72d" Dec 01 18:52:41 crc kubenswrapper[4935]: I1201 18:52:41.280853 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 18:52:41 crc kubenswrapper[4935]: W1201 18:52:41.310654 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ca38f45_895b_4bd4_812c_033deaa9eb45.slice/crio-2a53802676efc7c4dcdf14d97caa46fee16a21672ca2d4e75e3f2c0780b07a71 WatchSource:0}: Error finding container 2a53802676efc7c4dcdf14d97caa46fee16a21672ca2d4e75e3f2c0780b07a71: Status 404 returned error can't find the container with id 2a53802676efc7c4dcdf14d97caa46fee16a21672ca2d4e75e3f2c0780b07a71 Dec 01 18:52:41 crc kubenswrapper[4935]: I1201 18:52:41.584198 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-n7vb6"] Dec 01 18:52:41 crc kubenswrapper[4935]: I1201 18:52:41.740518 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-n7vb6" event={"ID":"c216b25c-f2ed-423c-8e80-829af72648ae","Type":"ContainerStarted","Data":"99b17b54ba1387a47513707bce80f907a0a57a8f5ea4f6d870a7740afad99652"} Dec 01 18:52:41 crc kubenswrapper[4935]: I1201 18:52:41.744609 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" 
event={"ID":"0ca38f45-895b-4bd4-812c-033deaa9eb45","Type":"ContainerStarted","Data":"2a53802676efc7c4dcdf14d97caa46fee16a21672ca2d4e75e3f2c0780b07a71"} Dec 01 18:52:41 crc kubenswrapper[4935]: I1201 18:52:41.745389 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-wh57b" event={"ID":"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7","Type":"ContainerStarted","Data":"f52433e4acc8daecb40b02731eaa3d5436e25ad8ab6f512aa717635c09220938"} Dec 01 18:52:42 crc kubenswrapper[4935]: E1201 18:52:42.187108 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 01 18:52:42 crc kubenswrapper[4935]: E1201 18:52:42.187166 4935 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 01 18:52:42 crc kubenswrapper[4935]: E1201 18:52:42.187312 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-v2jg2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 01 18:52:42 crc kubenswrapper[4935]: E1201 18:52:42.188387 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd" Dec 01 18:52:42 crc kubenswrapper[4935]: I1201 18:52:42.758377 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"6ad10b89-b196-46ba-8b53-a10f8b2a5310","Type":"ContainerStarted","Data":"bb67ecee2e068bc38e014d089d0af32e8f5cd55ee0436df32fd353ceb399919e"} Dec 01 18:52:42 crc kubenswrapper[4935]: I1201 18:52:42.760053 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 01 18:52:42 crc kubenswrapper[4935]: E1201 18:52:42.759069 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd" Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.770502 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8bd64079-678d-43de-aeb6-6818338d5997","Type":"ContainerStarted","Data":"0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a"} Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.773353 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" event={"ID":"202825a3-ae0e-443f-ac33-3ce527a1bbd3","Type":"ContainerStarted","Data":"080c21bcee19fadcfe74098175a9b225a1dfccbcd57d8694e205ee3358200bda"} Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.777042 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" event={"ID":"d028f398-7da7-4877-a954-6322e304e369","Type":"ContainerStarted","Data":"2a9c1fb9278fc32fc35a34867d0f98c704f913d00e0d0be7c9a9c4a305be56e4"} Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.777271 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.777303 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" podUID="d028f398-7da7-4877-a954-6322e304e369" containerName="dnsmasq-dns" containerID="cri-o://2a9c1fb9278fc32fc35a34867d0f98c704f913d00e0d0be7c9a9c4a305be56e4" gracePeriod=10 Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.779530 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zxbb9" event={"ID":"f188ef33-496a-425c-87a1-54d67d7b42b5","Type":"ContainerStarted","Data":"e92deb174e999560ccbc39637456fbb884496515dae782dde1a00420fdbae463"} Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.783891 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ead52c85-6fd1-4ba0-9d5d-09955ce5b967","Type":"ContainerStarted","Data":"557b5d1ff2fb5d524953f0641da044b2ffba262b0ca0e0c393ade1dc743b795b"} Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.794354 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7twr7" event={"ID":"3798fbe5-306b-43f9-8f1f-ddc928996f88","Type":"ContainerStarted","Data":"1b269fa9ff137fb6149a116663ce502069e1ef75162742d8f18e25da099ed177"} Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 
18:52:43.794481 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-7twr7" Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.798424 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=25.006649813 podStartE2EDuration="34.798409106s" podCreationTimestamp="2025-12-01 18:52:09 +0000 UTC" firstStartedPulling="2025-12-01 18:52:23.447106907 +0000 UTC m=+1357.468736166" lastFinishedPulling="2025-12-01 18:52:33.2388662 +0000 UTC m=+1367.260495459" observedRunningTime="2025-12-01 18:52:42.80111558 +0000 UTC m=+1376.822744839" watchObservedRunningTime="2025-12-01 18:52:43.798409106 +0000 UTC m=+1377.820038355" Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.808664 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"88d842df-da24-4955-aae0-e6125a01ed0b","Type":"ContainerStarted","Data":"608916b02fcc94db32f84370a2bbf4b64818d65c6008caa924a9a4fad31611bf"} Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.815979 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"a20a342c-d5f0-4a57-b485-5e8a122a6034","Type":"ContainerStarted","Data":"dbab7c5e0d3b633236d1a6e1d1278918a9cfed7cd531774a49457fd0e85c344c"} Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.823607 4935 generic.go:334] "Generic (PLEG): container finished" podID="c216b25c-f2ed-423c-8e80-829af72648ae" containerID="7665c4205dd3065e37807673c831952c042d13e26eebd4a18e981abfc5c2be76" exitCode=0 Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.823686 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-n7vb6" event={"ID":"c216b25c-f2ed-423c-8e80-829af72648ae","Type":"ContainerDied","Data":"7665c4205dd3065e37807673c831952c042d13e26eebd4a18e981abfc5c2be76"} Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.860877 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-mt5kg" podStartSLOduration=15.973012812 podStartE2EDuration="31.86085582s" podCreationTimestamp="2025-12-01 18:52:12 +0000 UTC" firstStartedPulling="2025-12-01 18:52:24.224852315 +0000 UTC m=+1358.246481574" lastFinishedPulling="2025-12-01 18:52:40.112695323 +0000 UTC m=+1374.134324582" observedRunningTime="2025-12-01 18:52:43.833682151 +0000 UTC m=+1377.855311410" watchObservedRunningTime="2025-12-01 18:52:43.86085582 +0000 UTC m=+1377.882485079" Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.869892 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-7twr7" podStartSLOduration=9.48027363 podStartE2EDuration="26.869876184s" podCreationTimestamp="2025-12-01 18:52:17 +0000 UTC" firstStartedPulling="2025-12-01 18:52:24.226803587 +0000 UTC m=+1358.248432846" lastFinishedPulling="2025-12-01 18:52:41.616406141 +0000 UTC m=+1375.638035400" observedRunningTime="2025-12-01 18:52:43.854670474 +0000 UTC m=+1377.876299733" watchObservedRunningTime="2025-12-01 18:52:43.869876184 +0000 UTC m=+1377.891505443" Dec 01 18:52:43 crc kubenswrapper[4935]: I1201 18:52:43.900589 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" podStartSLOduration=38.900570665000004 podStartE2EDuration="38.900570665s" podCreationTimestamp="2025-12-01 18:52:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:52:43.880053646 +0000 UTC m=+1377.901682905" watchObservedRunningTime="2025-12-01 18:52:43.900570665 +0000 UTC m=+1377.922199924" Dec 01 18:52:44 crc kubenswrapper[4935]: I1201 18:52:44.837621 4935 generic.go:334] "Generic (PLEG): container finished" podID="d028f398-7da7-4877-a954-6322e304e369" containerID="2a9c1fb9278fc32fc35a34867d0f98c704f913d00e0d0be7c9a9c4a305be56e4" exitCode=0 Dec 01 18:52:44 crc kubenswrapper[4935]: I1201 18:52:44.837698 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" event={"ID":"d028f398-7da7-4877-a954-6322e304e369","Type":"ContainerDied","Data":"2a9c1fb9278fc32fc35a34867d0f98c704f913d00e0d0be7c9a9c4a305be56e4"} Dec 01 18:52:44 crc kubenswrapper[4935]: I1201 18:52:44.840741 4935 generic.go:334] "Generic (PLEG): container finished" podID="f188ef33-496a-425c-87a1-54d67d7b42b5" containerID="e92deb174e999560ccbc39637456fbb884496515dae782dde1a00420fdbae463" exitCode=0 Dec 01 18:52:44 crc kubenswrapper[4935]: I1201 18:52:44.840839 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zxbb9" event={"ID":"f188ef33-496a-425c-87a1-54d67d7b42b5","Type":"ContainerDied","Data":"e92deb174e999560ccbc39637456fbb884496515dae782dde1a00420fdbae463"} Dec 01 18:52:44 crc kubenswrapper[4935]: I1201 18:52:44.843424 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"fce93449-11d7-490f-9456-8f8667b9cb6d","Type":"ContainerStarted","Data":"4436d0a1aa0b94abbc6edba1f66766705f930f3895beeda29b89c6f73d2deabb"} Dec 01 18:52:44 crc kubenswrapper[4935]: I1201 18:52:44.844942 4935 generic.go:334] "Generic (PLEG): container finished" podID="0ca38f45-895b-4bd4-812c-033deaa9eb45" containerID="b7d9f1dc9384ff4847cf252de0c4e95135774828145a5c20d570a789002c4646" exitCode=0 Dec 01 18:52:44 crc kubenswrapper[4935]: I1201 18:52:44.845002 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" event={"ID":"0ca38f45-895b-4bd4-812c-033deaa9eb45","Type":"ContainerDied","Data":"b7d9f1dc9384ff4847cf252de0c4e95135774828145a5c20d570a789002c4646"} Dec 01 18:52:44 crc kubenswrapper[4935]: I1201 18:52:44.846938 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"096839ae-3e36-4242-bfbd-e19bf1ada9f2","Type":"ContainerStarted","Data":"c241fdf1d984a655e2d870ab6fa72f63f0054d04e738b3d968e9d0f070a015e0"} Dec 01 18:52:45 crc kubenswrapper[4935]: I1201 18:52:45.862050 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" event={"ID":"d028f398-7da7-4877-a954-6322e304e369","Type":"ContainerDied","Data":"cdab3827e5c3b26e7b76f080bb502297717be4be8d298399f4c8d13c9d91937f"} Dec 01 18:52:45 crc kubenswrapper[4935]: I1201 18:52:45.862466 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdab3827e5c3b26e7b76f080bb502297717be4be8d298399f4c8d13c9d91937f" Dec 01 18:52:45 crc kubenswrapper[4935]: I1201 18:52:45.864959 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerStarted","Data":"b0248489c60e5fc8835d98c149d9d39763045268826d86eb62e5e34dbc7a8977"} Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.023099 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.160393 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28hg5\" (UniqueName: \"kubernetes.io/projected/d028f398-7da7-4877-a954-6322e304e369-kube-api-access-28hg5\") pod \"d028f398-7da7-4877-a954-6322e304e369\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.160592 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-config\") pod \"d028f398-7da7-4877-a954-6322e304e369\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.160632 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-dns-svc\") pod \"d028f398-7da7-4877-a954-6322e304e369\" (UID: \"d028f398-7da7-4877-a954-6322e304e369\") " Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.169003 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d028f398-7da7-4877-a954-6322e304e369-kube-api-access-28hg5" (OuterVolumeSpecName: "kube-api-access-28hg5") pod "d028f398-7da7-4877-a954-6322e304e369" (UID: "d028f398-7da7-4877-a954-6322e304e369"). InnerVolumeSpecName "kube-api-access-28hg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.263225 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28hg5\" (UniqueName: \"kubernetes.io/projected/d028f398-7da7-4877-a954-6322e304e369-kube-api-access-28hg5\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.317703 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d028f398-7da7-4877-a954-6322e304e369" (UID: "d028f398-7da7-4877-a954-6322e304e369"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.340911 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-config" (OuterVolumeSpecName: "config") pod "d028f398-7da7-4877-a954-6322e304e369" (UID: "d028f398-7da7-4877-a954-6322e304e369"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.365110 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.365237 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d028f398-7da7-4877-a954-6322e304e369-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.877079 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ead52c85-6fd1-4ba0-9d5d-09955ce5b967","Type":"ContainerStarted","Data":"6e6eb14f6ab5ea38013b63fc66d525e5bf55c4d7d8b9689ab0da50c7786cca82"} Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.879981 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-n7vb6" event={"ID":"c216b25c-f2ed-423c-8e80-829af72648ae","Type":"ContainerStarted","Data":"8fc6e951327a0d602faa6716cc7d81ea36433cf5d77171bb96801b47815ad861"} Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.880171 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.882503 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" event={"ID":"0ca38f45-895b-4bd4-812c-033deaa9eb45","Type":"ContainerStarted","Data":"06ccfcb569b542a76083c558fa9a2f5c13116150f9a11f84895422cd8da20e87"} Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.882972 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.884315 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-wh57b" event={"ID":"d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7","Type":"ContainerStarted","Data":"a043c8ea561ba0175099bd5766bfaf4b7f1d56a87feb7cce447472a63c5d7ab5"} Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.888824 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"096839ae-3e36-4242-bfbd-e19bf1ada9f2","Type":"ContainerStarted","Data":"091da8c1402de02436b48df628f0a2b2a1c1e6b425c2d53960ef044a95b20c5b"} Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.892110 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zxbb9" event={"ID":"f188ef33-496a-425c-87a1-54d67d7b42b5","Type":"ContainerStarted","Data":"e7aa64158457ba26435d4aab645b7c67c9259768d09b6dd198b77c23c6cd9f21"} Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.892177 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zxbb9" event={"ID":"f188ef33-496a-425c-87a1-54d67d7b42b5","Type":"ContainerStarted","Data":"6d668169f485efb07c5c9abb7a7ab72cf5c83dcd21cc70b92a24bc40dd56dec2"} Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.892291 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.892366 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.894026 4935 generic.go:334] "Generic (PLEG): container 
finished" podID="a20a342c-d5f0-4a57-b485-5e8a122a6034" containerID="dbab7c5e0d3b633236d1a6e1d1278918a9cfed7cd531774a49457fd0e85c344c" exitCode=0 Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.894101 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-xdzvr" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.894105 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"a20a342c-d5f0-4a57-b485-5e8a122a6034","Type":"ContainerDied","Data":"dbab7c5e0d3b633236d1a6e1d1278918a9cfed7cd531774a49457fd0e85c344c"} Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.917136 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=10.227425187 podStartE2EDuration="31.917108732s" podCreationTimestamp="2025-12-01 18:52:15 +0000 UTC" firstStartedPulling="2025-12-01 18:52:24.419377602 +0000 UTC m=+1358.441006861" lastFinishedPulling="2025-12-01 18:52:46.109061137 +0000 UTC m=+1380.130690406" observedRunningTime="2025-12-01 18:52:46.900422405 +0000 UTC m=+1380.922051664" watchObservedRunningTime="2025-12-01 18:52:46.917108732 +0000 UTC m=+1380.938738001" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.931019 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-wh57b" podStartSLOduration=14.272429013 podStartE2EDuration="18.931000541s" podCreationTimestamp="2025-12-01 18:52:28 +0000 UTC" firstStartedPulling="2025-12-01 18:52:41.280589669 +0000 UTC m=+1375.302218928" lastFinishedPulling="2025-12-01 18:52:45.939161167 +0000 UTC m=+1379.960790456" observedRunningTime="2025-12-01 18:52:46.920592592 +0000 UTC m=+1380.942221891" watchObservedRunningTime="2025-12-01 18:52:46.931000541 +0000 UTC m=+1380.952629810" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.963991 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-n7vb6" podStartSLOduration=18.963974523 podStartE2EDuration="18.963974523s" podCreationTimestamp="2025-12-01 18:52:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:52:46.952637434 +0000 UTC m=+1380.974266703" watchObservedRunningTime="2025-12-01 18:52:46.963974523 +0000 UTC m=+1380.985603792" Dec 01 18:52:46 crc kubenswrapper[4935]: I1201 18:52:46.981053 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=11.781519912 podStartE2EDuration="29.981037442s" podCreationTimestamp="2025-12-01 18:52:17 +0000 UTC" firstStartedPulling="2025-12-01 18:52:27.745562064 +0000 UTC m=+1361.767191323" lastFinishedPulling="2025-12-01 18:52:45.945079594 +0000 UTC m=+1379.966708853" observedRunningTime="2025-12-01 18:52:46.973663039 +0000 UTC m=+1380.995292308" watchObservedRunningTime="2025-12-01 18:52:46.981037442 +0000 UTC m=+1381.002666701" Dec 01 18:52:47 crc kubenswrapper[4935]: I1201 18:52:47.006850 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" podStartSLOduration=19.006834137 podStartE2EDuration="19.006834137s" podCreationTimestamp="2025-12-01 18:52:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:52:47.003569964 +0000 UTC 
m=+1381.025199243" watchObservedRunningTime="2025-12-01 18:52:47.006834137 +0000 UTC m=+1381.028463396" Dec 01 18:52:47 crc kubenswrapper[4935]: I1201 18:52:47.075819 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-zxbb9" podStartSLOduration=15.102270293 podStartE2EDuration="30.075796296s" podCreationTimestamp="2025-12-01 18:52:17 +0000 UTC" firstStartedPulling="2025-12-01 18:52:26.405976262 +0000 UTC m=+1360.427605531" lastFinishedPulling="2025-12-01 18:52:41.379502275 +0000 UTC m=+1375.401131534" observedRunningTime="2025-12-01 18:52:47.063635003 +0000 UTC m=+1381.085264262" watchObservedRunningTime="2025-12-01 18:52:47.075796296 +0000 UTC m=+1381.097425555" Dec 01 18:52:47 crc kubenswrapper[4935]: I1201 18:52:47.091541 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xdzvr"] Dec 01 18:52:47 crc kubenswrapper[4935]: I1201 18:52:47.100330 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-xdzvr"] Dec 01 18:52:47 crc kubenswrapper[4935]: I1201 18:52:47.926603 4935 generic.go:334] "Generic (PLEG): container finished" podID="88d842df-da24-4955-aae0-e6125a01ed0b" containerID="608916b02fcc94db32f84370a2bbf4b64818d65c6008caa924a9a4fad31611bf" exitCode=0 Dec 01 18:52:47 crc kubenswrapper[4935]: I1201 18:52:47.926702 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"88d842df-da24-4955-aae0-e6125a01ed0b","Type":"ContainerDied","Data":"608916b02fcc94db32f84370a2bbf4b64818d65c6008caa924a9a4fad31611bf"} Dec 01 18:52:47 crc kubenswrapper[4935]: I1201 18:52:47.931213 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"a20a342c-d5f0-4a57-b485-5e8a122a6034","Type":"ContainerStarted","Data":"f2952aab09bc1bcde19552d03b38e404b51b4a22dd289cce02e1ababfbfed5ce"} Dec 01 18:52:47 crc kubenswrapper[4935]: I1201 18:52:47.997902 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=32.86423259 podStartE2EDuration="41.997877076s" podCreationTimestamp="2025-12-01 18:52:06 +0000 UTC" firstStartedPulling="2025-12-01 18:52:24.264033973 +0000 UTC m=+1358.285663232" lastFinishedPulling="2025-12-01 18:52:33.397678459 +0000 UTC m=+1367.419307718" observedRunningTime="2025-12-01 18:52:47.98154772 +0000 UTC m=+1382.003176979" watchObservedRunningTime="2025-12-01 18:52:47.997877076 +0000 UTC m=+1382.019506345" Dec 01 18:52:48 crc kubenswrapper[4935]: I1201 18:52:48.207317 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 01 18:52:48 crc kubenswrapper[4935]: I1201 18:52:48.207627 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 01 18:52:48 crc kubenswrapper[4935]: I1201 18:52:48.527512 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d028f398-7da7-4877-a954-6322e304e369" path="/var/lib/kubelet/pods/d028f398-7da7-4877-a954-6322e304e369/volumes" Dec 01 18:52:48 crc kubenswrapper[4935]: I1201 18:52:48.864897 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:48 crc kubenswrapper[4935]: I1201 18:52:48.865402 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:48 crc kubenswrapper[4935]: I1201 18:52:48.938411 4935 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:48 crc kubenswrapper[4935]: I1201 18:52:48.947286 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"88d842df-da24-4955-aae0-e6125a01ed0b","Type":"ContainerStarted","Data":"c11c098b9390c211540c07026d06d38e38562e8e869f6226fbdaabf4d5041fda"} Dec 01 18:52:49 crc kubenswrapper[4935]: I1201 18:52:49.014564 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=22.081904727 podStartE2EDuration="41.014543214s" podCreationTimestamp="2025-12-01 18:52:08 +0000 UTC" firstStartedPulling="2025-12-01 18:52:22.308860197 +0000 UTC m=+1356.330489466" lastFinishedPulling="2025-12-01 18:52:41.241498694 +0000 UTC m=+1375.263127953" observedRunningTime="2025-12-01 18:52:49.003310559 +0000 UTC m=+1383.024939838" watchObservedRunningTime="2025-12-01 18:52:49.014543214 +0000 UTC m=+1383.036172483" Dec 01 18:52:49 crc kubenswrapper[4935]: I1201 18:52:49.023808 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 01 18:52:49 crc kubenswrapper[4935]: I1201 18:52:49.987848 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:49 crc kubenswrapper[4935]: I1201 18:52:49.988096 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:49 crc kubenswrapper[4935]: I1201 18:52:49.990276 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:49 crc kubenswrapper[4935]: I1201 18:52:49.994478 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 01 18:52:50 crc kubenswrapper[4935]: I1201 18:52:50.048707 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.025414 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.068045 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.265830 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 01 18:52:51 crc kubenswrapper[4935]: E1201 18:52:51.266244 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" containerName="init" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.266260 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" containerName="init" Dec 01 18:52:51 crc kubenswrapper[4935]: E1201 18:52:51.266279 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d028f398-7da7-4877-a954-6322e304e369" containerName="init" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.266286 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d028f398-7da7-4877-a954-6322e304e369" containerName="init" Dec 01 18:52:51 crc kubenswrapper[4935]: E1201 18:52:51.266308 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d028f398-7da7-4877-a954-6322e304e369" containerName="dnsmasq-dns" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.266314 4935 
state_mem.go:107] "Deleted CPUSet assignment" podUID="d028f398-7da7-4877-a954-6322e304e369" containerName="dnsmasq-dns" Dec 01 18:52:51 crc kubenswrapper[4935]: E1201 18:52:51.266329 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" containerName="dnsmasq-dns" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.266335 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" containerName="dnsmasq-dns" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.266506 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d028f398-7da7-4877-a954-6322e304e369" containerName="dnsmasq-dns" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.266531 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a28f24e-19d3-4eb5-b972-6be6e2e47ed7" containerName="dnsmasq-dns" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.267584 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.273046 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.273335 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-vgspl" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.273499 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.273631 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.291817 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.437062 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8b4a63f-27e7-4af7-897a-204468754716-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.437120 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a8b4a63f-27e7-4af7-897a-204468754716-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.437392 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a8b4a63f-27e7-4af7-897a-204468754716-scripts\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.437539 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d29b8\" (UniqueName: \"kubernetes.io/projected/a8b4a63f-27e7-4af7-897a-204468754716-kube-api-access-d29b8\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.437569 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8b4a63f-27e7-4af7-897a-204468754716-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.437614 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8b4a63f-27e7-4af7-897a-204468754716-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.438099 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8b4a63f-27e7-4af7-897a-204468754716-config\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.540326 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8b4a63f-27e7-4af7-897a-204468754716-config\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.540777 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8b4a63f-27e7-4af7-897a-204468754716-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.541751 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8b4a63f-27e7-4af7-897a-204468754716-config\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.541860 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a8b4a63f-27e7-4af7-897a-204468754716-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.541925 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a8b4a63f-27e7-4af7-897a-204468754716-scripts\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.541974 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d29b8\" (UniqueName: \"kubernetes.io/projected/a8b4a63f-27e7-4af7-897a-204468754716-kube-api-access-d29b8\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.541994 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8b4a63f-27e7-4af7-897a-204468754716-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.542019 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8b4a63f-27e7-4af7-897a-204468754716-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.542737 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a8b4a63f-27e7-4af7-897a-204468754716-scripts\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.542878 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a8b4a63f-27e7-4af7-897a-204468754716-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.546732 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8b4a63f-27e7-4af7-897a-204468754716-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.554940 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8b4a63f-27e7-4af7-897a-204468754716-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.555050 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8b4a63f-27e7-4af7-897a-204468754716-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.560823 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d29b8\" (UniqueName: \"kubernetes.io/projected/a8b4a63f-27e7-4af7-897a-204468754716-kube-api-access-d29b8\") pod \"ovn-northd-0\" (UID: \"a8b4a63f-27e7-4af7-897a-204468754716\") " pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.599815 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.880924 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-4c2jr"] Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.881862 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" podUID="0ca38f45-895b-4bd4-812c-033deaa9eb45" containerName="dnsmasq-dns" containerID="cri-o://06ccfcb569b542a76083c558fa9a2f5c13116150f9a11f84895422cd8da20e87" gracePeriod=10 Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.885022 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.922231 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-cjlh4"] Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.923996 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:51 crc kubenswrapper[4935]: I1201 18:52:51.938346 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-cjlh4"] Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.072336 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.073595 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.073839 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvjrw\" (UniqueName: \"kubernetes.io/projected/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-kube-api-access-nvjrw\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.074084 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.074316 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-config\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.176511 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.176628 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-config\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.176673 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.176699 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.176779 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvjrw\" (UniqueName: \"kubernetes.io/projected/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-kube-api-access-nvjrw\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.177753 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-config\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.178704 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.179409 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.179440 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.204897 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvjrw\" (UniqueName: \"kubernetes.io/projected/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-kube-api-access-nvjrw\") pod \"dnsmasq-dns-b8fbc5445-cjlh4\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.231951 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.260592 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.348088 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.533482 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 01 18:52:52 crc kubenswrapper[4935]: I1201 18:52:52.867062 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-cjlh4"] Dec 01 18:52:52 crc kubenswrapper[4935]: W1201 18:52:52.869264 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f14ba90_8960_4a1c_b3fb_f740cb4a6650.slice/crio-2d8c481197013e5a651dff90b06ed0407f1287c7504cdd78ed8dbe4aabd651d1 WatchSource:0}: Error finding container 2d8c481197013e5a651dff90b06ed0407f1287c7504cdd78ed8dbe4aabd651d1: Status 404 returned error can't find the container with id 2d8c481197013e5a651dff90b06ed0407f1287c7504cdd78ed8dbe4aabd651d1 Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.069099 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" event={"ID":"5f14ba90-8960-4a1c-b3fb-f740cb4a6650","Type":"ContainerStarted","Data":"2d8c481197013e5a651dff90b06ed0407f1287c7504cdd78ed8dbe4aabd651d1"} Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.071047 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a8b4a63f-27e7-4af7-897a-204468754716","Type":"ContainerStarted","Data":"fd3c84caf367c6f614156f87dd78713ebee082bc999fda254004d82682b28b19"} Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.075885 4935 generic.go:334] "Generic (PLEG): container finished" podID="0ca38f45-895b-4bd4-812c-033deaa9eb45" containerID="06ccfcb569b542a76083c558fa9a2f5c13116150f9a11f84895422cd8da20e87" exitCode=0 Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.075963 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" event={"ID":"0ca38f45-895b-4bd4-812c-033deaa9eb45","Type":"ContainerDied","Data":"06ccfcb569b542a76083c558fa9a2f5c13116150f9a11f84895422cd8da20e87"} Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.077846 4935 generic.go:334] "Generic (PLEG): container finished" podID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerID="b0248489c60e5fc8835d98c149d9d39763045268826d86eb62e5e34dbc7a8977" exitCode=0 Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.077898 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerDied","Data":"b0248489c60e5fc8835d98c149d9d39763045268826d86eb62e5e34dbc7a8977"} Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.113573 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.127191 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.131174 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.133250 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-dbtgk" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.133609 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.133874 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.134086 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.208935 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e0ee2844-1713-4b15-81f5-138cbc14fe03-cache\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.209201 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.209245 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e0ee2844-1713-4b15-81f5-138cbc14fe03-lock\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.209292 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sncvn\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-kube-api-access-sncvn\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.209381 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.313386 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e0ee2844-1713-4b15-81f5-138cbc14fe03-cache\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.313690 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.313766 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"lock\" (UniqueName: \"kubernetes.io/empty-dir/e0ee2844-1713-4b15-81f5-138cbc14fe03-lock\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.313906 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e0ee2844-1713-4b15-81f5-138cbc14fe03-cache\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.313932 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sncvn\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-kube-api-access-sncvn\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.314200 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.314371 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e0ee2844-1713-4b15-81f5-138cbc14fe03-lock\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.314680 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: E1201 18:52:53.314824 4935 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 18:52:53 crc kubenswrapper[4935]: E1201 18:52:53.314890 4935 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 18:52:53 crc kubenswrapper[4935]: E1201 18:52:53.314989 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift podName:e0ee2844-1713-4b15-81f5-138cbc14fe03 nodeName:}" failed. No retries permitted until 2025-12-01 18:52:53.814973654 +0000 UTC m=+1387.836602913 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift") pod "swift-storage-0" (UID: "e0ee2844-1713-4b15-81f5-138cbc14fe03") : configmap "swift-ring-files" not found Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.335226 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sncvn\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-kube-api-access-sncvn\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.374810 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.824920 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:53 crc kubenswrapper[4935]: E1201 18:52:53.825171 4935 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 18:52:53 crc kubenswrapper[4935]: E1201 18:52:53.825430 4935 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 18:52:53 crc kubenswrapper[4935]: E1201 18:52:53.825489 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift podName:e0ee2844-1713-4b15-81f5-138cbc14fe03 nodeName:}" failed. No retries permitted until 2025-12-01 18:52:54.825469665 +0000 UTC m=+1388.847098924 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift") pod "swift-storage-0" (UID: "e0ee2844-1713-4b15-81f5-138cbc14fe03") : configmap "swift-ring-files" not found Dec 01 18:52:53 crc kubenswrapper[4935]: I1201 18:52:53.858063 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" podUID="0ca38f45-895b-4bd4-812c-033deaa9eb45" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.141:5353: connect: connection refused" Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.152600 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.346678 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.346749 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.346807 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.350556 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8d43b47ae64729f61d960fc5685829c02da961e532465f8f3fc4e3129716002b"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.350635 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://8d43b47ae64729f61d960fc5685829c02da961e532465f8f3fc4e3129716002b" gracePeriod=600 Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.825685 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.848974 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:54 crc kubenswrapper[4935]: E1201 18:52:54.849616 4935 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 18:52:54 crc kubenswrapper[4935]: E1201 18:52:54.849636 4935 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 18:52:54 crc kubenswrapper[4935]: E1201 18:52:54.849682 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift podName:e0ee2844-1713-4b15-81f5-138cbc14fe03 nodeName:}" failed. No retries permitted until 2025-12-01 18:52:56.849666672 +0000 UTC m=+1390.871295931 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift") pod "swift-storage-0" (UID: "e0ee2844-1713-4b15-81f5-138cbc14fe03") : configmap "swift-ring-files" not found Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.950606 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-ovsdbserver-sb\") pod \"0ca38f45-895b-4bd4-812c-033deaa9eb45\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.950733 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-config\") pod \"0ca38f45-895b-4bd4-812c-033deaa9eb45\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.950810 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttpfk\" (UniqueName: \"kubernetes.io/projected/0ca38f45-895b-4bd4-812c-033deaa9eb45-kube-api-access-ttpfk\") pod \"0ca38f45-895b-4bd4-812c-033deaa9eb45\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.950867 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-dns-svc\") pod \"0ca38f45-895b-4bd4-812c-033deaa9eb45\" (UID: \"0ca38f45-895b-4bd4-812c-033deaa9eb45\") " Dec 01 18:52:54 crc kubenswrapper[4935]: I1201 18:52:54.956974 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ca38f45-895b-4bd4-812c-033deaa9eb45-kube-api-access-ttpfk" (OuterVolumeSpecName: "kube-api-access-ttpfk") pod "0ca38f45-895b-4bd4-812c-033deaa9eb45" (UID: "0ca38f45-895b-4bd4-812c-033deaa9eb45"). InnerVolumeSpecName "kube-api-access-ttpfk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.001013 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0ca38f45-895b-4bd4-812c-033deaa9eb45" (UID: "0ca38f45-895b-4bd4-812c-033deaa9eb45"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.001566 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0ca38f45-895b-4bd4-812c-033deaa9eb45" (UID: "0ca38f45-895b-4bd4-812c-033deaa9eb45"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.012972 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-config" (OuterVolumeSpecName: "config") pod "0ca38f45-895b-4bd4-812c-033deaa9eb45" (UID: "0ca38f45-895b-4bd4-812c-033deaa9eb45"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.053365 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.053397 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.053409 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ca38f45-895b-4bd4-812c-033deaa9eb45-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.053418 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttpfk\" (UniqueName: \"kubernetes.io/projected/0ca38f45-895b-4bd4-812c-033deaa9eb45-kube-api-access-ttpfk\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.098807 4935 generic.go:334] "Generic (PLEG): container finished" podID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerID="83213db29832ab816fb4e977406624c01233ff40ba7fec2539457866fe31b07e" exitCode=0 Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.098892 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" event={"ID":"5f14ba90-8960-4a1c-b3fb-f740cb4a6650","Type":"ContainerDied","Data":"83213db29832ab816fb4e977406624c01233ff40ba7fec2539457866fe31b07e"} Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.104437 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="8d43b47ae64729f61d960fc5685829c02da961e532465f8f3fc4e3129716002b" exitCode=0 Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.104509 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"8d43b47ae64729f61d960fc5685829c02da961e532465f8f3fc4e3129716002b"} Dec 
01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.104542 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842"} Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.104561 4935 scope.go:117] "RemoveContainer" containerID="89d0c184ee1dbdba2189f946ff97ea233b33f6dde95b0c4dc3f41a9fad7d86ae" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.113504 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" event={"ID":"0ca38f45-895b-4bd4-812c-033deaa9eb45","Type":"ContainerDied","Data":"2a53802676efc7c4dcdf14d97caa46fee16a21672ca2d4e75e3f2c0780b07a71"} Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.113566 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-4c2jr" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.167596 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-4c2jr"] Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.174839 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-4c2jr"] Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.362570 4935 scope.go:117] "RemoveContainer" containerID="06ccfcb569b542a76083c558fa9a2f5c13116150f9a11f84895422cd8da20e87" Dec 01 18:52:55 crc kubenswrapper[4935]: I1201 18:52:55.419189 4935 scope.go:117] "RemoveContainer" containerID="b7d9f1dc9384ff4847cf252de0c4e95135774828145a5c20d570a789002c4646" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.124656 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a8b4a63f-27e7-4af7-897a-204468754716","Type":"ContainerStarted","Data":"8e1ff2c9f09a4c5eea9c41d3dd24649f9609a3963f62c73278f151ef98bc90d4"} Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.125113 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a8b4a63f-27e7-4af7-897a-204468754716","Type":"ContainerStarted","Data":"d1740c20bbaf2677db8e96c7d6ef9c3e09ac405f611e843aa60b7f8b6c0f1ede"} Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.125132 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.129067 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" event={"ID":"5f14ba90-8960-4a1c-b3fb-f740cb4a6650","Type":"ContainerStarted","Data":"f935a6d4eb7ad0ab42ad4156cf94aa42adaf17fb576bc8ae7aec2992225e7a41"} Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.129339 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.154320 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.973325526 podStartE2EDuration="5.15430093s" podCreationTimestamp="2025-12-01 18:52:51 +0000 UTC" firstStartedPulling="2025-12-01 18:52:52.240796447 +0000 UTC m=+1386.262425706" lastFinishedPulling="2025-12-01 18:52:55.421771851 +0000 UTC m=+1389.443401110" observedRunningTime="2025-12-01 18:52:56.146871745 +0000 UTC m=+1390.168501004" watchObservedRunningTime="2025-12-01 18:52:56.15430093 +0000 UTC 
m=+1390.175930189" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.175643 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" podStartSLOduration=5.175609973 podStartE2EDuration="5.175609973s" podCreationTimestamp="2025-12-01 18:52:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:52:56.174277111 +0000 UTC m=+1390.195906370" watchObservedRunningTime="2025-12-01 18:52:56.175609973 +0000 UTC m=+1390.197239242" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.537771 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ca38f45-895b-4bd4-812c-033deaa9eb45" path="/var/lib/kubelet/pods/0ca38f45-895b-4bd4-812c-033deaa9eb45/volumes" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.910659 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:52:56 crc kubenswrapper[4935]: E1201 18:52:56.911553 4935 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 18:52:56 crc kubenswrapper[4935]: E1201 18:52:56.911594 4935 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 18:52:56 crc kubenswrapper[4935]: E1201 18:52:56.911709 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift podName:e0ee2844-1713-4b15-81f5-138cbc14fe03 nodeName:}" failed. No retries permitted until 2025-12-01 18:53:00.911676314 +0000 UTC m=+1394.933305623 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift") pod "swift-storage-0" (UID: "e0ee2844-1713-4b15-81f5-138cbc14fe03") : configmap "swift-ring-files" not found Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.992125 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-2v2t8"] Dec 01 18:52:56 crc kubenswrapper[4935]: E1201 18:52:56.992561 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca38f45-895b-4bd4-812c-033deaa9eb45" containerName="init" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.992577 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca38f45-895b-4bd4-812c-033deaa9eb45" containerName="init" Dec 01 18:52:56 crc kubenswrapper[4935]: E1201 18:52:56.992590 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca38f45-895b-4bd4-812c-033deaa9eb45" containerName="dnsmasq-dns" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.992597 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca38f45-895b-4bd4-812c-033deaa9eb45" containerName="dnsmasq-dns" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.992781 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ca38f45-895b-4bd4-812c-033deaa9eb45" containerName="dnsmasq-dns" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.993432 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.997689 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.997932 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 01 18:52:56 crc kubenswrapper[4935]: I1201 18:52:56.998045 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.021778 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-2v2t8"] Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.120751 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-swiftconf\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.120816 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-combined-ca-bundle\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.120841 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-ring-data-devices\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.120903 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-dispersionconf\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.121030 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-scripts\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.121059 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/42f244f1-1b31-4831-8f12-f95ef0199c7c-etc-swift\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.121239 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6442\" (UniqueName: \"kubernetes.io/projected/42f244f1-1b31-4831-8f12-f95ef0199c7c-kube-api-access-j6442\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 
18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.223430 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-combined-ca-bundle\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.223491 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-ring-data-devices\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.223528 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-dispersionconf\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.223751 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-scripts\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.223799 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/42f244f1-1b31-4831-8f12-f95ef0199c7c-etc-swift\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.223872 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6442\" (UniqueName: \"kubernetes.io/projected/42f244f1-1b31-4831-8f12-f95ef0199c7c-kube-api-access-j6442\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.223949 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-swiftconf\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.224724 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-ring-data-devices\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.224768 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-scripts\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.225257 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/42f244f1-1b31-4831-8f12-f95ef0199c7c-etc-swift\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.231865 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-swiftconf\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.235400 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-dispersionconf\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.236103 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-combined-ca-bundle\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.245257 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6442\" (UniqueName: \"kubernetes.io/projected/42f244f1-1b31-4831-8f12-f95ef0199c7c-kube-api-access-j6442\") pod \"swift-ring-rebalance-2v2t8\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.316810 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:52:57 crc kubenswrapper[4935]: I1201 18:52:57.794438 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-2v2t8"] Dec 01 18:52:58 crc kubenswrapper[4935]: I1201 18:52:58.153758 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2v2t8" event={"ID":"42f244f1-1b31-4831-8f12-f95ef0199c7c","Type":"ContainerStarted","Data":"df3fe67126854f7470257339f8ff9654d766f93b50951e9cf59c772d0dca8719"} Dec 01 18:52:58 crc kubenswrapper[4935]: I1201 18:52:58.192709 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:58 crc kubenswrapper[4935]: I1201 18:52:58.303611 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 01 18:52:58 crc kubenswrapper[4935]: I1201 18:52:58.805196 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-66ff47d4bb-794fx" podUID="f74d6018-3e94-4935-8e2b-de23ecdadecc" containerName="console" containerID="cri-o://2e075dd5f9a4b265cc14bd97b8bf26aa140bc863ebf805056878e0968dbfd8d3" gracePeriod=15 Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.168382 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd","Type":"ContainerStarted","Data":"f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac"} Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.169378 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.173311 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-66ff47d4bb-794fx_f74d6018-3e94-4935-8e2b-de23ecdadecc/console/0.log" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.173589 4935 generic.go:334] "Generic (PLEG): container finished" podID="f74d6018-3e94-4935-8e2b-de23ecdadecc" containerID="2e075dd5f9a4b265cc14bd97b8bf26aa140bc863ebf805056878e0968dbfd8d3" exitCode=2 Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.173979 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-66ff47d4bb-794fx" event={"ID":"f74d6018-3e94-4935-8e2b-de23ecdadecc","Type":"ContainerDied","Data":"2e075dd5f9a4b265cc14bd97b8bf26aa140bc863ebf805056878e0968dbfd8d3"} Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.199038 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=13.641322205 podStartE2EDuration="48.199019507s" podCreationTimestamp="2025-12-01 18:52:11 +0000 UTC" firstStartedPulling="2025-12-01 18:52:24.171033004 +0000 UTC m=+1358.192662263" lastFinishedPulling="2025-12-01 18:52:58.728730306 +0000 UTC m=+1392.750359565" observedRunningTime="2025-12-01 18:52:59.191874432 +0000 UTC m=+1393.213503701" watchObservedRunningTime="2025-12-01 18:52:59.199019507 +0000 UTC m=+1393.220648786" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.404681 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-66ff47d4bb-794fx_f74d6018-3e94-4935-8e2b-de23ecdadecc/console/0.log" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.404763 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.490552 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-config\") pod \"f74d6018-3e94-4935-8e2b-de23ecdadecc\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.490639 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-oauth-serving-cert\") pod \"f74d6018-3e94-4935-8e2b-de23ecdadecc\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.490667 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-oauth-config\") pod \"f74d6018-3e94-4935-8e2b-de23ecdadecc\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.490706 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n2nd\" (UniqueName: \"kubernetes.io/projected/f74d6018-3e94-4935-8e2b-de23ecdadecc-kube-api-access-8n2nd\") pod \"f74d6018-3e94-4935-8e2b-de23ecdadecc\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.490749 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-serving-cert\") pod \"f74d6018-3e94-4935-8e2b-de23ecdadecc\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.490770 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-trusted-ca-bundle\") pod \"f74d6018-3e94-4935-8e2b-de23ecdadecc\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.490853 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-service-ca\") pod \"f74d6018-3e94-4935-8e2b-de23ecdadecc\" (UID: \"f74d6018-3e94-4935-8e2b-de23ecdadecc\") " Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.491590 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "f74d6018-3e94-4935-8e2b-de23ecdadecc" (UID: "f74d6018-3e94-4935-8e2b-de23ecdadecc"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.491815 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-service-ca" (OuterVolumeSpecName: "service-ca") pod "f74d6018-3e94-4935-8e2b-de23ecdadecc" (UID: "f74d6018-3e94-4935-8e2b-de23ecdadecc"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.491958 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "f74d6018-3e94-4935-8e2b-de23ecdadecc" (UID: "f74d6018-3e94-4935-8e2b-de23ecdadecc"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.492080 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-config" (OuterVolumeSpecName: "console-config") pod "f74d6018-3e94-4935-8e2b-de23ecdadecc" (UID: "f74d6018-3e94-4935-8e2b-de23ecdadecc"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.501904 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "f74d6018-3e94-4935-8e2b-de23ecdadecc" (UID: "f74d6018-3e94-4935-8e2b-de23ecdadecc"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.505271 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "f74d6018-3e94-4935-8e2b-de23ecdadecc" (UID: "f74d6018-3e94-4935-8e2b-de23ecdadecc"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.514042 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f74d6018-3e94-4935-8e2b-de23ecdadecc-kube-api-access-8n2nd" (OuterVolumeSpecName: "kube-api-access-8n2nd") pod "f74d6018-3e94-4935-8e2b-de23ecdadecc" (UID: "f74d6018-3e94-4935-8e2b-de23ecdadecc"). InnerVolumeSpecName "kube-api-access-8n2nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.564888 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-605e-account-create-update-bfsft"] Dec 01 18:52:59 crc kubenswrapper[4935]: E1201 18:52:59.565368 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f74d6018-3e94-4935-8e2b-de23ecdadecc" containerName="console" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.565384 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f74d6018-3e94-4935-8e2b-de23ecdadecc" containerName="console" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.565587 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f74d6018-3e94-4935-8e2b-de23ecdadecc" containerName="console" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.566273 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.568436 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.575189 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-wph7x"] Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.576526 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wph7x" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.584471 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-605e-account-create-update-bfsft"] Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.593013 4935 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.593036 4935 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.593047 4935 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.593058 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n2nd\" (UniqueName: \"kubernetes.io/projected/f74d6018-3e94-4935-8e2b-de23ecdadecc-kube-api-access-8n2nd\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.593071 4935 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f74d6018-3e94-4935-8e2b-de23ecdadecc-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.593081 4935 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.593094 4935 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f74d6018-3e94-4935-8e2b-de23ecdadecc-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.595284 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wph7x"] Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.695021 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtnpl\" (UniqueName: \"kubernetes.io/projected/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-kube-api-access-dtnpl\") pod \"keystone-605e-account-create-update-bfsft\" (UID: \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\") " pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.695320 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w895j\" (UniqueName: 
\"kubernetes.io/projected/475f0637-3250-46ef-bafa-c3a57c5780a2-kube-api-access-w895j\") pod \"keystone-db-create-wph7x\" (UID: \"475f0637-3250-46ef-bafa-c3a57c5780a2\") " pod="openstack/keystone-db-create-wph7x" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.695386 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/475f0637-3250-46ef-bafa-c3a57c5780a2-operator-scripts\") pod \"keystone-db-create-wph7x\" (UID: \"475f0637-3250-46ef-bafa-c3a57c5780a2\") " pod="openstack/keystone-db-create-wph7x" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.695425 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-operator-scripts\") pod \"keystone-605e-account-create-update-bfsft\" (UID: \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\") " pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.757626 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-sgqs7"] Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.759193 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sgqs7" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.772036 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-sgqs7"] Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.798256 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w895j\" (UniqueName: \"kubernetes.io/projected/475f0637-3250-46ef-bafa-c3a57c5780a2-kube-api-access-w895j\") pod \"keystone-db-create-wph7x\" (UID: \"475f0637-3250-46ef-bafa-c3a57c5780a2\") " pod="openstack/keystone-db-create-wph7x" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.798357 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/475f0637-3250-46ef-bafa-c3a57c5780a2-operator-scripts\") pod \"keystone-db-create-wph7x\" (UID: \"475f0637-3250-46ef-bafa-c3a57c5780a2\") " pod="openstack/keystone-db-create-wph7x" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.799302 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/475f0637-3250-46ef-bafa-c3a57c5780a2-operator-scripts\") pod \"keystone-db-create-wph7x\" (UID: \"475f0637-3250-46ef-bafa-c3a57c5780a2\") " pod="openstack/keystone-db-create-wph7x" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.799403 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-operator-scripts\") pod \"keystone-605e-account-create-update-bfsft\" (UID: \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\") " pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.800465 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtnpl\" (UniqueName: \"kubernetes.io/projected/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-kube-api-access-dtnpl\") pod \"keystone-605e-account-create-update-bfsft\" (UID: \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\") " 
pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.800600 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-operator-scripts\") pod \"keystone-605e-account-create-update-bfsft\" (UID: \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\") " pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.817010 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w895j\" (UniqueName: \"kubernetes.io/projected/475f0637-3250-46ef-bafa-c3a57c5780a2-kube-api-access-w895j\") pod \"keystone-db-create-wph7x\" (UID: \"475f0637-3250-46ef-bafa-c3a57c5780a2\") " pod="openstack/keystone-db-create-wph7x" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.823514 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtnpl\" (UniqueName: \"kubernetes.io/projected/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-kube-api-access-dtnpl\") pod \"keystone-605e-account-create-update-bfsft\" (UID: \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\") " pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.859529 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-2e51-account-create-update-hwrgm"] Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.861385 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.863794 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.867508 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-2e51-account-create-update-hwrgm"] Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.903386 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2df55b0-7b7c-4330-8c63-c1fdc708b950-operator-scripts\") pod \"placement-db-create-sgqs7\" (UID: \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\") " pod="openstack/placement-db-create-sgqs7" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.903432 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb89l\" (UniqueName: \"kubernetes.io/projected/c2df55b0-7b7c-4330-8c63-c1fdc708b950-kube-api-access-fb89l\") pod \"placement-db-create-sgqs7\" (UID: \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\") " pod="openstack/placement-db-create-sgqs7" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.903887 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:52:59 crc kubenswrapper[4935]: I1201 18:52:59.914585 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wph7x" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.006129 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2df55b0-7b7c-4330-8c63-c1fdc708b950-operator-scripts\") pod \"placement-db-create-sgqs7\" (UID: \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\") " pod="openstack/placement-db-create-sgqs7" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.006223 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb89l\" (UniqueName: \"kubernetes.io/projected/c2df55b0-7b7c-4330-8c63-c1fdc708b950-kube-api-access-fb89l\") pod \"placement-db-create-sgqs7\" (UID: \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\") " pod="openstack/placement-db-create-sgqs7" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.006294 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-operator-scripts\") pod \"placement-2e51-account-create-update-hwrgm\" (UID: \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\") " pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.006377 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spzs5\" (UniqueName: \"kubernetes.io/projected/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-kube-api-access-spzs5\") pod \"placement-2e51-account-create-update-hwrgm\" (UID: \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\") " pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.006990 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2df55b0-7b7c-4330-8c63-c1fdc708b950-operator-scripts\") pod \"placement-db-create-sgqs7\" (UID: \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\") " pod="openstack/placement-db-create-sgqs7" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.031546 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb89l\" (UniqueName: \"kubernetes.io/projected/c2df55b0-7b7c-4330-8c63-c1fdc708b950-kube-api-access-fb89l\") pod \"placement-db-create-sgqs7\" (UID: \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\") " pod="openstack/placement-db-create-sgqs7" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.108166 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-operator-scripts\") pod \"placement-2e51-account-create-update-hwrgm\" (UID: \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\") " pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.108261 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spzs5\" (UniqueName: \"kubernetes.io/projected/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-kube-api-access-spzs5\") pod \"placement-2e51-account-create-update-hwrgm\" (UID: \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\") " pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.108997 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-operator-scripts\") pod \"placement-2e51-account-create-update-hwrgm\" (UID: \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\") " pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.127872 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spzs5\" (UniqueName: \"kubernetes.io/projected/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-kube-api-access-spzs5\") pod \"placement-2e51-account-create-update-hwrgm\" (UID: \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\") " pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.194840 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sgqs7" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.196406 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.227614 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-pq64s"] Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.229099 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-pq64s"] Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.229187 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pq64s" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.241888 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-66ff47d4bb-794fx_f74d6018-3e94-4935-8e2b-de23ecdadecc/console/0.log" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.241986 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-66ff47d4bb-794fx" event={"ID":"f74d6018-3e94-4935-8e2b-de23ecdadecc","Type":"ContainerDied","Data":"19a146740b92e48ab3882afb3e6ce01e7e0128658178f04438a2def632e6c522"} Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.242042 4935 scope.go:117] "RemoveContainer" containerID="2e075dd5f9a4b265cc14bd97b8bf26aa140bc863ebf805056878e0968dbfd8d3" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.242071 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-66ff47d4bb-794fx" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.277270 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-c9ba-account-create-update-jm7xw"] Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.279567 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.284966 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.319628 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wqxr\" (UniqueName: \"kubernetes.io/projected/00b6178d-eeba-450b-b8e6-289bba7db372-kube-api-access-9wqxr\") pod \"glance-db-create-pq64s\" (UID: \"00b6178d-eeba-450b-b8e6-289bba7db372\") " pod="openstack/glance-db-create-pq64s" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.319799 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00b6178d-eeba-450b-b8e6-289bba7db372-operator-scripts\") pod \"glance-db-create-pq64s\" (UID: \"00b6178d-eeba-450b-b8e6-289bba7db372\") " pod="openstack/glance-db-create-pq64s" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.322916 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c9ba-account-create-update-jm7xw"] Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.331044 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-66ff47d4bb-794fx"] Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.348544 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-66ff47d4bb-794fx"] Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.421890 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wqxr\" (UniqueName: \"kubernetes.io/projected/00b6178d-eeba-450b-b8e6-289bba7db372-kube-api-access-9wqxr\") pod \"glance-db-create-pq64s\" (UID: \"00b6178d-eeba-450b-b8e6-289bba7db372\") " pod="openstack/glance-db-create-pq64s" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.421960 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00b6178d-eeba-450b-b8e6-289bba7db372-operator-scripts\") pod \"glance-db-create-pq64s\" (UID: \"00b6178d-eeba-450b-b8e6-289bba7db372\") " pod="openstack/glance-db-create-pq64s" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.422122 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d1fbcb0-5eaa-4512-bae5-0759240427f7-operator-scripts\") pod \"glance-c9ba-account-create-update-jm7xw\" (UID: \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\") " pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.422246 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mr2n\" (UniqueName: \"kubernetes.io/projected/7d1fbcb0-5eaa-4512-bae5-0759240427f7-kube-api-access-7mr2n\") pod \"glance-c9ba-account-create-update-jm7xw\" (UID: \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\") " pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.422926 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00b6178d-eeba-450b-b8e6-289bba7db372-operator-scripts\") pod \"glance-db-create-pq64s\" (UID: \"00b6178d-eeba-450b-b8e6-289bba7db372\") " 
pod="openstack/glance-db-create-pq64s" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.443336 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wqxr\" (UniqueName: \"kubernetes.io/projected/00b6178d-eeba-450b-b8e6-289bba7db372-kube-api-access-9wqxr\") pod \"glance-db-create-pq64s\" (UID: \"00b6178d-eeba-450b-b8e6-289bba7db372\") " pod="openstack/glance-db-create-pq64s" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.523746 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d1fbcb0-5eaa-4512-bae5-0759240427f7-operator-scripts\") pod \"glance-c9ba-account-create-update-jm7xw\" (UID: \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\") " pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.523816 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mr2n\" (UniqueName: \"kubernetes.io/projected/7d1fbcb0-5eaa-4512-bae5-0759240427f7-kube-api-access-7mr2n\") pod \"glance-c9ba-account-create-update-jm7xw\" (UID: \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\") " pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.527286 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d1fbcb0-5eaa-4512-bae5-0759240427f7-operator-scripts\") pod \"glance-c9ba-account-create-update-jm7xw\" (UID: \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\") " pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.536305 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f74d6018-3e94-4935-8e2b-de23ecdadecc" path="/var/lib/kubelet/pods/f74d6018-3e94-4935-8e2b-de23ecdadecc/volumes" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.540182 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mr2n\" (UniqueName: \"kubernetes.io/projected/7d1fbcb0-5eaa-4512-bae5-0759240427f7-kube-api-access-7mr2n\") pod \"glance-c9ba-account-create-update-jm7xw\" (UID: \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\") " pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.556262 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pq64s" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.609555 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:00 crc kubenswrapper[4935]: I1201 18:53:00.934789 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:53:00 crc kubenswrapper[4935]: E1201 18:53:00.935076 4935 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 18:53:00 crc kubenswrapper[4935]: E1201 18:53:00.935310 4935 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 18:53:00 crc kubenswrapper[4935]: E1201 18:53:00.935402 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift podName:e0ee2844-1713-4b15-81f5-138cbc14fe03 nodeName:}" failed. No retries permitted until 2025-12-01 18:53:08.935372799 +0000 UTC m=+1402.957002088 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift") pod "swift-storage-0" (UID: "e0ee2844-1713-4b15-81f5-138cbc14fe03") : configmap "swift-ring-files" not found Dec 01 18:53:01 crc kubenswrapper[4935]: I1201 18:53:01.873346 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-openstack-db-create-8pnk2"] Dec 01 18:53:01 crc kubenswrapper[4935]: I1201 18:53:01.875506 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:01 crc kubenswrapper[4935]: I1201 18:53:01.899734 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-openstack-db-create-8pnk2"] Dec 01 18:53:01 crc kubenswrapper[4935]: I1201 18:53:01.961980 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/086eb35c-13fa-4941-8812-712ca0a53fdc-operator-scripts\") pod \"mysqld-exporter-openstack-db-create-8pnk2\" (UID: \"086eb35c-13fa-4941-8812-712ca0a53fdc\") " pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:01 crc kubenswrapper[4935]: I1201 18:53:01.962108 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5r9c\" (UniqueName: \"kubernetes.io/projected/086eb35c-13fa-4941-8812-712ca0a53fdc-kube-api-access-t5r9c\") pod \"mysqld-exporter-openstack-db-create-8pnk2\" (UID: \"086eb35c-13fa-4941-8812-712ca0a53fdc\") " pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.006762 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-68a7-account-create-update-dpgxj"] Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.008622 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.014614 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"mysqld-exporter-openstack-db-secret" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.023033 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-68a7-account-create-update-dpgxj"] Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.064075 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/086eb35c-13fa-4941-8812-712ca0a53fdc-operator-scripts\") pod \"mysqld-exporter-openstack-db-create-8pnk2\" (UID: \"086eb35c-13fa-4941-8812-712ca0a53fdc\") " pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.064207 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5r9c\" (UniqueName: \"kubernetes.io/projected/086eb35c-13fa-4941-8812-712ca0a53fdc-kube-api-access-t5r9c\") pod \"mysqld-exporter-openstack-db-create-8pnk2\" (UID: \"086eb35c-13fa-4941-8812-712ca0a53fdc\") " pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.064724 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/086eb35c-13fa-4941-8812-712ca0a53fdc-operator-scripts\") pod \"mysqld-exporter-openstack-db-create-8pnk2\" (UID: \"086eb35c-13fa-4941-8812-712ca0a53fdc\") " pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.086497 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5r9c\" (UniqueName: \"kubernetes.io/projected/086eb35c-13fa-4941-8812-712ca0a53fdc-kube-api-access-t5r9c\") pod \"mysqld-exporter-openstack-db-create-8pnk2\" (UID: \"086eb35c-13fa-4941-8812-712ca0a53fdc\") " pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.165534 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j95mz\" (UniqueName: \"kubernetes.io/projected/82ea2a63-302e-4238-b445-98df3d0bac7d-kube-api-access-j95mz\") pod \"mysqld-exporter-68a7-account-create-update-dpgxj\" (UID: \"82ea2a63-302e-4238-b445-98df3d0bac7d\") " pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.165697 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82ea2a63-302e-4238-b445-98df3d0bac7d-operator-scripts\") pod \"mysqld-exporter-68a7-account-create-update-dpgxj\" (UID: \"82ea2a63-302e-4238-b445-98df3d0bac7d\") " pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.202858 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.262307 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.267356 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82ea2a63-302e-4238-b445-98df3d0bac7d-operator-scripts\") pod \"mysqld-exporter-68a7-account-create-update-dpgxj\" (UID: \"82ea2a63-302e-4238-b445-98df3d0bac7d\") " pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.267476 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j95mz\" (UniqueName: \"kubernetes.io/projected/82ea2a63-302e-4238-b445-98df3d0bac7d-kube-api-access-j95mz\") pod \"mysqld-exporter-68a7-account-create-update-dpgxj\" (UID: \"82ea2a63-302e-4238-b445-98df3d0bac7d\") " pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.268066 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82ea2a63-302e-4238-b445-98df3d0bac7d-operator-scripts\") pod \"mysqld-exporter-68a7-account-create-update-dpgxj\" (UID: \"82ea2a63-302e-4238-b445-98df3d0bac7d\") " pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.288321 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j95mz\" (UniqueName: \"kubernetes.io/projected/82ea2a63-302e-4238-b445-98df3d0bac7d-kube-api-access-j95mz\") pod \"mysqld-exporter-68a7-account-create-update-dpgxj\" (UID: \"82ea2a63-302e-4238-b445-98df3d0bac7d\") " pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.326076 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.328061 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-n7vb6"] Dec 01 18:53:02 crc kubenswrapper[4935]: I1201 18:53:02.332369 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-n7vb6" podUID="c216b25c-f2ed-423c-8e80-829af72648ae" containerName="dnsmasq-dns" containerID="cri-o://8fc6e951327a0d602faa6716cc7d81ea36433cf5d77171bb96801b47815ad861" gracePeriod=10 Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.281790 4935 generic.go:334] "Generic (PLEG): container finished" podID="c216b25c-f2ed-423c-8e80-829af72648ae" containerID="8fc6e951327a0d602faa6716cc7d81ea36433cf5d77171bb96801b47815ad861" exitCode=0 Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.281898 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-n7vb6" event={"ID":"c216b25c-f2ed-423c-8e80-829af72648ae","Type":"ContainerDied","Data":"8fc6e951327a0d602faa6716cc7d81ea36433cf5d77171bb96801b47815ad861"} Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.857793 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.906034 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-config\") pod \"c216b25c-f2ed-423c-8e80-829af72648ae\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.906094 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5x99n\" (UniqueName: \"kubernetes.io/projected/c216b25c-f2ed-423c-8e80-829af72648ae-kube-api-access-5x99n\") pod \"c216b25c-f2ed-423c-8e80-829af72648ae\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.906243 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-sb\") pod \"c216b25c-f2ed-423c-8e80-829af72648ae\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.906275 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-dns-svc\") pod \"c216b25c-f2ed-423c-8e80-829af72648ae\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.906427 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-nb\") pod \"c216b25c-f2ed-423c-8e80-829af72648ae\" (UID: \"c216b25c-f2ed-423c-8e80-829af72648ae\") " Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.913708 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c216b25c-f2ed-423c-8e80-829af72648ae-kube-api-access-5x99n" (OuterVolumeSpecName: "kube-api-access-5x99n") pod "c216b25c-f2ed-423c-8e80-829af72648ae" (UID: "c216b25c-f2ed-423c-8e80-829af72648ae"). InnerVolumeSpecName "kube-api-access-5x99n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.989983 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c216b25c-f2ed-423c-8e80-829af72648ae" (UID: "c216b25c-f2ed-423c-8e80-829af72648ae"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:03 crc kubenswrapper[4935]: I1201 18:53:03.992798 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c216b25c-f2ed-423c-8e80-829af72648ae" (UID: "c216b25c-f2ed-423c-8e80-829af72648ae"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.006064 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-config" (OuterVolumeSpecName: "config") pod "c216b25c-f2ed-423c-8e80-829af72648ae" (UID: "c216b25c-f2ed-423c-8e80-829af72648ae"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.006581 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c216b25c-f2ed-423c-8e80-829af72648ae" (UID: "c216b25c-f2ed-423c-8e80-829af72648ae"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.011100 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.011123 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5x99n\" (UniqueName: \"kubernetes.io/projected/c216b25c-f2ed-423c-8e80-829af72648ae-kube-api-access-5x99n\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.011133 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.011142 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.011163 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c216b25c-f2ed-423c-8e80-829af72648ae-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.141868 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-605e-account-create-update-bfsft"] Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.216539 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-2e51-account-create-update-hwrgm"] Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.299409 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-n7vb6" event={"ID":"c216b25c-f2ed-423c-8e80-829af72648ae","Type":"ContainerDied","Data":"99b17b54ba1387a47513707bce80f907a0a57a8f5ea4f6d870a7740afad99652"} Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.299482 4935 scope.go:117] "RemoveContainer" containerID="8fc6e951327a0d602faa6716cc7d81ea36433cf5d77171bb96801b47815ad861" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.299518 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-n7vb6" Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.350568 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-n7vb6"] Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.360657 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-n7vb6"] Dec 01 18:53:04 crc kubenswrapper[4935]: I1201 18:53:04.521491 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c216b25c-f2ed-423c-8e80-829af72648ae" path="/var/lib/kubelet/pods/c216b25c-f2ed-423c-8e80-829af72648ae/volumes" Dec 01 18:53:04 crc kubenswrapper[4935]: W1201 18:53:04.967020 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffd39947_9d32_4aca_ac65_83d13c6fc3d9.slice/crio-65f5fbb8ba9f9e08a19ac82fa70a5d4afbadf41d1f251df523cf433eae905f11 WatchSource:0}: Error finding container 65f5fbb8ba9f9e08a19ac82fa70a5d4afbadf41d1f251df523cf433eae905f11: Status 404 returned error can't find the container with id 65f5fbb8ba9f9e08a19ac82fa70a5d4afbadf41d1f251df523cf433eae905f11 Dec 01 18:53:05 crc kubenswrapper[4935]: I1201 18:53:05.028494 4935 scope.go:117] "RemoveContainer" containerID="7665c4205dd3065e37807673c831952c042d13e26eebd4a18e981abfc5c2be76" Dec 01 18:53:05 crc kubenswrapper[4935]: I1201 18:53:05.318719 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-2e51-account-create-update-hwrgm" event={"ID":"ffd39947-9d32-4aca-ac65-83d13c6fc3d9","Type":"ContainerStarted","Data":"65f5fbb8ba9f9e08a19ac82fa70a5d4afbadf41d1f251df523cf433eae905f11"} Dec 01 18:53:05 crc kubenswrapper[4935]: I1201 18:53:05.320617 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-605e-account-create-update-bfsft" event={"ID":"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba","Type":"ContainerStarted","Data":"2da7359e9d3a358bd1387f62653647c9a53c0b05a1b388b864114bee13fcc08d"} Dec 01 18:53:05 crc kubenswrapper[4935]: I1201 18:53:05.603682 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-68a7-account-create-update-dpgxj"] Dec 01 18:53:05 crc kubenswrapper[4935]: W1201 18:53:05.606414 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82ea2a63_302e_4238_b445_98df3d0bac7d.slice/crio-d8016efd74eb7f617475352bc0d1d3f4d260b5448bd6648b62f79d62371ff025 WatchSource:0}: Error finding container d8016efd74eb7f617475352bc0d1d3f4d260b5448bd6648b62f79d62371ff025: Status 404 returned error can't find the container with id d8016efd74eb7f617475352bc0d1d3f4d260b5448bd6648b62f79d62371ff025 Dec 01 18:53:05 crc kubenswrapper[4935]: I1201 18:53:05.624207 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-openstack-db-create-8pnk2"] Dec 01 18:53:05 crc kubenswrapper[4935]: I1201 18:53:05.650054 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c9ba-account-create-update-jm7xw"] Dec 01 18:53:05 crc kubenswrapper[4935]: I1201 18:53:05.772660 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wph7x"] Dec 01 18:53:05 crc kubenswrapper[4935]: I1201 18:53:05.877414 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-sgqs7"] Dec 01 18:53:05 crc kubenswrapper[4935]: W1201 18:53:05.881379 4935 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2df55b0_7b7c_4330_8c63_c1fdc708b950.slice/crio-89528a2d7a039bb89e50fd43a3e2de319da466e81c2a1bf28d64b0a01219a71a WatchSource:0}: Error finding container 89528a2d7a039bb89e50fd43a3e2de319da466e81c2a1bf28d64b0a01219a71a: Status 404 returned error can't find the container with id 89528a2d7a039bb89e50fd43a3e2de319da466e81c2a1bf28d64b0a01219a71a Dec 01 18:53:05 crc kubenswrapper[4935]: I1201 18:53:05.945713 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-pq64s"] Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.333605 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" event={"ID":"82ea2a63-302e-4238-b445-98df3d0bac7d","Type":"ContainerStarted","Data":"a7234599bcf74ceabf90054559d822bcb88cbb1cff93800a1fda6c77e26c35b7"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.333872 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" event={"ID":"82ea2a63-302e-4238-b445-98df3d0bac7d","Type":"ContainerStarted","Data":"d8016efd74eb7f617475352bc0d1d3f4d260b5448bd6648b62f79d62371ff025"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.336713 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" event={"ID":"086eb35c-13fa-4941-8812-712ca0a53fdc","Type":"ContainerStarted","Data":"69c6a7720248d1520ab12ee6dabb92f2c396ef1accea9a378018a9d7edceae15"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.336743 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" event={"ID":"086eb35c-13fa-4941-8812-712ca0a53fdc","Type":"ContainerStarted","Data":"0ba87a426d6f9df479fb64f2f73f887a32c11c85610195eb13feda6d8cb7112d"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.343961 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerStarted","Data":"dde129be6a644def858efe83416b6954e828146ddc0b245da0554f4d3fedcd65"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.348434 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c9ba-account-create-update-jm7xw" event={"ID":"7d1fbcb0-5eaa-4512-bae5-0759240427f7","Type":"ContainerStarted","Data":"67962251ed45178d83235949cf126a542e946bc8b9e8b45060209f3efe536cc3"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.348593 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c9ba-account-create-update-jm7xw" event={"ID":"7d1fbcb0-5eaa-4512-bae5-0759240427f7","Type":"ContainerStarted","Data":"3775666ac0f98b390c67fba4a77a6af545c7e26209ecd52e3020ef484537e589"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.353309 4935 generic.go:334] "Generic (PLEG): container finished" podID="ffd39947-9d32-4aca-ac65-83d13c6fc3d9" containerID="af689a59f2ed63212cc38e7e2fcf05d4aec8726fc26015b05f07bd47ad17db54" exitCode=0 Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.353359 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-2e51-account-create-update-hwrgm" event={"ID":"ffd39947-9d32-4aca-ac65-83d13c6fc3d9","Type":"ContainerDied","Data":"af689a59f2ed63212cc38e7e2fcf05d4aec8726fc26015b05f07bd47ad17db54"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.357038 4935 generic.go:334] "Generic (PLEG): 
container finished" podID="8ba93a6b-9a94-4dc1-8bb2-392863cb64ba" containerID="3b5e7c06e3947b1239cbc24ee7388e566a5e74282ad5aa98e0bf1c0d6d02ba16" exitCode=0 Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.357183 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-605e-account-create-update-bfsft" event={"ID":"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba","Type":"ContainerDied","Data":"3b5e7c06e3947b1239cbc24ee7388e566a5e74282ad5aa98e0bf1c0d6d02ba16"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.360653 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2v2t8" event={"ID":"42f244f1-1b31-4831-8f12-f95ef0199c7c","Type":"ContainerStarted","Data":"42ce040c8635cd9251d151775959ab831f10d26eb51c288a5639bb70ab959ef3"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.368520 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wph7x" event={"ID":"475f0637-3250-46ef-bafa-c3a57c5780a2","Type":"ContainerStarted","Data":"5a22f1e43d5ad5ab08ffe12f8af805c0f3f992c67e874b8bfa19aa7e888d9153"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.368719 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wph7x" event={"ID":"475f0637-3250-46ef-bafa-c3a57c5780a2","Type":"ContainerStarted","Data":"89ea0802c54dbe2e519a2f56ce3ef5b0690b7cd5444d8d02401519495c4fc9b4"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.369300 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" podStartSLOduration=5.369263097 podStartE2EDuration="5.369263097s" podCreationTimestamp="2025-12-01 18:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:06.353596031 +0000 UTC m=+1400.375225290" watchObservedRunningTime="2025-12-01 18:53:06.369263097 +0000 UTC m=+1400.390892356" Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.375291 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sgqs7" event={"ID":"c2df55b0-7b7c-4330-8c63-c1fdc708b950","Type":"ContainerStarted","Data":"725df424051b41ce28f3fbef2a77bc719d8b01bac141ca6a0bc2b53be02b8227"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.375319 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sgqs7" event={"ID":"c2df55b0-7b7c-4330-8c63-c1fdc708b950","Type":"ContainerStarted","Data":"89528a2d7a039bb89e50fd43a3e2de319da466e81c2a1bf28d64b0a01219a71a"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.385444 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pq64s" event={"ID":"00b6178d-eeba-450b-b8e6-289bba7db372","Type":"ContainerStarted","Data":"1141d1b2daa6d355f7be4666cd3adf07d9676e697c3a9274d42b6f53e9cce9e7"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.385483 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pq64s" event={"ID":"00b6178d-eeba-450b-b8e6-289bba7db372","Type":"ContainerStarted","Data":"454f7dd53dcf39f21c26755a627b59c001adc9f02502346985ada91f626ab96c"} Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.389653 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-c9ba-account-create-update-jm7xw" podStartSLOduration=6.38963956 podStartE2EDuration="6.38963956s" podCreationTimestamp="2025-12-01 18:53:00 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:06.381874685 +0000 UTC m=+1400.403503944" watchObservedRunningTime="2025-12-01 18:53:06.38963956 +0000 UTC m=+1400.411268819" Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.406078 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" podStartSLOduration=5.40606225 podStartE2EDuration="5.40606225s" podCreationTimestamp="2025-12-01 18:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:06.395713332 +0000 UTC m=+1400.417342581" watchObservedRunningTime="2025-12-01 18:53:06.40606225 +0000 UTC m=+1400.427691509" Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.413445 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-wph7x" podStartSLOduration=7.413427833 podStartE2EDuration="7.413427833s" podCreationTimestamp="2025-12-01 18:52:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:06.411295895 +0000 UTC m=+1400.432925144" watchObservedRunningTime="2025-12-01 18:53:06.413427833 +0000 UTC m=+1400.435057092" Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.448677 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-sgqs7" podStartSLOduration=7.448660286 podStartE2EDuration="7.448660286s" podCreationTimestamp="2025-12-01 18:52:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:06.430300326 +0000 UTC m=+1400.451929585" watchObservedRunningTime="2025-12-01 18:53:06.448660286 +0000 UTC m=+1400.470289545" Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.472047 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-pq64s" podStartSLOduration=6.4720250440000004 podStartE2EDuration="6.472025044s" podCreationTimestamp="2025-12-01 18:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:06.445166416 +0000 UTC m=+1400.466795665" watchObservedRunningTime="2025-12-01 18:53:06.472025044 +0000 UTC m=+1400.493654303" Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.535485 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-2v2t8" podStartSLOduration=3.28106489 podStartE2EDuration="10.535468229s" podCreationTimestamp="2025-12-01 18:52:56 +0000 UTC" firstStartedPulling="2025-12-01 18:52:57.822299931 +0000 UTC m=+1391.843929200" lastFinishedPulling="2025-12-01 18:53:05.07670328 +0000 UTC m=+1399.098332539" observedRunningTime="2025-12-01 18:53:06.502720834 +0000 UTC m=+1400.524350093" watchObservedRunningTime="2025-12-01 18:53:06.535468229 +0000 UTC m=+1400.557097488" Dec 01 18:53:06 crc kubenswrapper[4935]: I1201 18:53:06.674982 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.401356 4935 generic.go:334] "Generic (PLEG): container finished" podID="82ea2a63-302e-4238-b445-98df3d0bac7d" 
containerID="a7234599bcf74ceabf90054559d822bcb88cbb1cff93800a1fda6c77e26c35b7" exitCode=0 Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.401710 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" event={"ID":"82ea2a63-302e-4238-b445-98df3d0bac7d","Type":"ContainerDied","Data":"a7234599bcf74ceabf90054559d822bcb88cbb1cff93800a1fda6c77e26c35b7"} Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.405649 4935 generic.go:334] "Generic (PLEG): container finished" podID="086eb35c-13fa-4941-8812-712ca0a53fdc" containerID="69c6a7720248d1520ab12ee6dabb92f2c396ef1accea9a378018a9d7edceae15" exitCode=0 Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.405726 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" event={"ID":"086eb35c-13fa-4941-8812-712ca0a53fdc","Type":"ContainerDied","Data":"69c6a7720248d1520ab12ee6dabb92f2c396ef1accea9a378018a9d7edceae15"} Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.408466 4935 generic.go:334] "Generic (PLEG): container finished" podID="475f0637-3250-46ef-bafa-c3a57c5780a2" containerID="5a22f1e43d5ad5ab08ffe12f8af805c0f3f992c67e874b8bfa19aa7e888d9153" exitCode=0 Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.408551 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wph7x" event={"ID":"475f0637-3250-46ef-bafa-c3a57c5780a2","Type":"ContainerDied","Data":"5a22f1e43d5ad5ab08ffe12f8af805c0f3f992c67e874b8bfa19aa7e888d9153"} Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.414064 4935 generic.go:334] "Generic (PLEG): container finished" podID="c2df55b0-7b7c-4330-8c63-c1fdc708b950" containerID="725df424051b41ce28f3fbef2a77bc719d8b01bac141ca6a0bc2b53be02b8227" exitCode=0 Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.414119 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sgqs7" event={"ID":"c2df55b0-7b7c-4330-8c63-c1fdc708b950","Type":"ContainerDied","Data":"725df424051b41ce28f3fbef2a77bc719d8b01bac141ca6a0bc2b53be02b8227"} Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.416395 4935 generic.go:334] "Generic (PLEG): container finished" podID="7d1fbcb0-5eaa-4512-bae5-0759240427f7" containerID="67962251ed45178d83235949cf126a542e946bc8b9e8b45060209f3efe536cc3" exitCode=0 Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.416460 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c9ba-account-create-update-jm7xw" event={"ID":"7d1fbcb0-5eaa-4512-bae5-0759240427f7","Type":"ContainerDied","Data":"67962251ed45178d83235949cf126a542e946bc8b9e8b45060209f3efe536cc3"} Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.426268 4935 generic.go:334] "Generic (PLEG): container finished" podID="00b6178d-eeba-450b-b8e6-289bba7db372" containerID="1141d1b2daa6d355f7be4666cd3adf07d9676e697c3a9274d42b6f53e9cce9e7" exitCode=0 Dec 01 18:53:07 crc kubenswrapper[4935]: I1201 18:53:07.427128 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pq64s" event={"ID":"00b6178d-eeba-450b-b8e6-289bba7db372","Type":"ContainerDied","Data":"1141d1b2daa6d355f7be4666cd3adf07d9676e697c3a9274d42b6f53e9cce9e7"} Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.079592 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.086679 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.150610 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spzs5\" (UniqueName: \"kubernetes.io/projected/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-kube-api-access-spzs5\") pod \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\" (UID: \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\") " Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.150910 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-operator-scripts\") pod \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\" (UID: \"ffd39947-9d32-4aca-ac65-83d13c6fc3d9\") " Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.151625 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ffd39947-9d32-4aca-ac65-83d13c6fc3d9" (UID: "ffd39947-9d32-4aca-ac65-83d13c6fc3d9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.161742 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-kube-api-access-spzs5" (OuterVolumeSpecName: "kube-api-access-spzs5") pod "ffd39947-9d32-4aca-ac65-83d13c6fc3d9" (UID: "ffd39947-9d32-4aca-ac65-83d13c6fc3d9"). InnerVolumeSpecName "kube-api-access-spzs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.253073 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtnpl\" (UniqueName: \"kubernetes.io/projected/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-kube-api-access-dtnpl\") pod \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\" (UID: \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\") " Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.253175 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-operator-scripts\") pod \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\" (UID: \"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba\") " Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.253932 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8ba93a6b-9a94-4dc1-8bb2-392863cb64ba" (UID: "8ba93a6b-9a94-4dc1-8bb2-392863cb64ba"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.254141 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.254191 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.254204 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spzs5\" (UniqueName: \"kubernetes.io/projected/ffd39947-9d32-4aca-ac65-83d13c6fc3d9-kube-api-access-spzs5\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.256247 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-kube-api-access-dtnpl" (OuterVolumeSpecName: "kube-api-access-dtnpl") pod "8ba93a6b-9a94-4dc1-8bb2-392863cb64ba" (UID: "8ba93a6b-9a94-4dc1-8bb2-392863cb64ba"). InnerVolumeSpecName "kube-api-access-dtnpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.356386 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtnpl\" (UniqueName: \"kubernetes.io/projected/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba-kube-api-access-dtnpl\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.441925 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-2e51-account-create-update-hwrgm" event={"ID":"ffd39947-9d32-4aca-ac65-83d13c6fc3d9","Type":"ContainerDied","Data":"65f5fbb8ba9f9e08a19ac82fa70a5d4afbadf41d1f251df523cf433eae905f11"} Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.441995 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65f5fbb8ba9f9e08a19ac82fa70a5d4afbadf41d1f251df523cf433eae905f11" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.444933 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-2e51-account-create-update-hwrgm" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.446813 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-605e-account-create-update-bfsft" event={"ID":"8ba93a6b-9a94-4dc1-8bb2-392863cb64ba","Type":"ContainerDied","Data":"2da7359e9d3a358bd1387f62653647c9a53c0b05a1b388b864114bee13fcc08d"} Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.446864 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2da7359e9d3a358bd1387f62653647c9a53c0b05a1b388b864114bee13fcc08d" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.446917 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-605e-account-create-update-bfsft" Dec 01 18:53:08 crc kubenswrapper[4935]: I1201 18:53:08.977885 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:53:08 crc kubenswrapper[4935]: E1201 18:53:08.978372 4935 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 18:53:08 crc kubenswrapper[4935]: E1201 18:53:08.978535 4935 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 18:53:08 crc kubenswrapper[4935]: E1201 18:53:08.978583 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift podName:e0ee2844-1713-4b15-81f5-138cbc14fe03 nodeName:}" failed. No retries permitted until 2025-12-01 18:53:24.978565205 +0000 UTC m=+1419.000194464 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift") pod "swift-storage-0" (UID: "e0ee2844-1713-4b15-81f5-138cbc14fe03") : configmap "swift-ring-files" not found Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.244275 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.250509 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.259884 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sgqs7" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.264998 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wph7x" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.389212 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2df55b0-7b7c-4330-8c63-c1fdc708b950-operator-scripts\") pod \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\" (UID: \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.389527 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5r9c\" (UniqueName: \"kubernetes.io/projected/086eb35c-13fa-4941-8812-712ca0a53fdc-kube-api-access-t5r9c\") pod \"086eb35c-13fa-4941-8812-712ca0a53fdc\" (UID: \"086eb35c-13fa-4941-8812-712ca0a53fdc\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.389621 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w895j\" (UniqueName: \"kubernetes.io/projected/475f0637-3250-46ef-bafa-c3a57c5780a2-kube-api-access-w895j\") pod \"475f0637-3250-46ef-bafa-c3a57c5780a2\" (UID: \"475f0637-3250-46ef-bafa-c3a57c5780a2\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.389647 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/475f0637-3250-46ef-bafa-c3a57c5780a2-operator-scripts\") pod \"475f0637-3250-46ef-bafa-c3a57c5780a2\" (UID: \"475f0637-3250-46ef-bafa-c3a57c5780a2\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.389716 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d1fbcb0-5eaa-4512-bae5-0759240427f7-operator-scripts\") pod \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\" (UID: \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.389748 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fb89l\" (UniqueName: \"kubernetes.io/projected/c2df55b0-7b7c-4330-8c63-c1fdc708b950-kube-api-access-fb89l\") pod \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\" (UID: \"c2df55b0-7b7c-4330-8c63-c1fdc708b950\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.389768 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/086eb35c-13fa-4941-8812-712ca0a53fdc-operator-scripts\") pod \"086eb35c-13fa-4941-8812-712ca0a53fdc\" (UID: \"086eb35c-13fa-4941-8812-712ca0a53fdc\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.389804 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mr2n\" (UniqueName: \"kubernetes.io/projected/7d1fbcb0-5eaa-4512-bae5-0759240427f7-kube-api-access-7mr2n\") pod \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\" (UID: \"7d1fbcb0-5eaa-4512-bae5-0759240427f7\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.389962 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2df55b0-7b7c-4330-8c63-c1fdc708b950-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c2df55b0-7b7c-4330-8c63-c1fdc708b950" (UID: "c2df55b0-7b7c-4330-8c63-c1fdc708b950"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.390279 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2df55b0-7b7c-4330-8c63-c1fdc708b950-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.390376 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/475f0637-3250-46ef-bafa-c3a57c5780a2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "475f0637-3250-46ef-bafa-c3a57c5780a2" (UID: "475f0637-3250-46ef-bafa-c3a57c5780a2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.391244 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d1fbcb0-5eaa-4512-bae5-0759240427f7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7d1fbcb0-5eaa-4512-bae5-0759240427f7" (UID: "7d1fbcb0-5eaa-4512-bae5-0759240427f7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.391539 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/086eb35c-13fa-4941-8812-712ca0a53fdc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "086eb35c-13fa-4941-8812-712ca0a53fdc" (UID: "086eb35c-13fa-4941-8812-712ca0a53fdc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.396449 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/475f0637-3250-46ef-bafa-c3a57c5780a2-kube-api-access-w895j" (OuterVolumeSpecName: "kube-api-access-w895j") pod "475f0637-3250-46ef-bafa-c3a57c5780a2" (UID: "475f0637-3250-46ef-bafa-c3a57c5780a2"). InnerVolumeSpecName "kube-api-access-w895j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.397094 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d1fbcb0-5eaa-4512-bae5-0759240427f7-kube-api-access-7mr2n" (OuterVolumeSpecName: "kube-api-access-7mr2n") pod "7d1fbcb0-5eaa-4512-bae5-0759240427f7" (UID: "7d1fbcb0-5eaa-4512-bae5-0759240427f7"). InnerVolumeSpecName "kube-api-access-7mr2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.398141 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/086eb35c-13fa-4941-8812-712ca0a53fdc-kube-api-access-t5r9c" (OuterVolumeSpecName: "kube-api-access-t5r9c") pod "086eb35c-13fa-4941-8812-712ca0a53fdc" (UID: "086eb35c-13fa-4941-8812-712ca0a53fdc"). InnerVolumeSpecName "kube-api-access-t5r9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.398856 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2df55b0-7b7c-4330-8c63-c1fdc708b950-kube-api-access-fb89l" (OuterVolumeSpecName: "kube-api-access-fb89l") pod "c2df55b0-7b7c-4330-8c63-c1fdc708b950" (UID: "c2df55b0-7b7c-4330-8c63-c1fdc708b950"). InnerVolumeSpecName "kube-api-access-fb89l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.415713 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.459807 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-sgqs7" event={"ID":"c2df55b0-7b7c-4330-8c63-c1fdc708b950","Type":"ContainerDied","Data":"89528a2d7a039bb89e50fd43a3e2de319da466e81c2a1bf28d64b0a01219a71a"} Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.459852 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89528a2d7a039bb89e50fd43a3e2de319da466e81c2a1bf28d64b0a01219a71a" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.460282 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-sgqs7" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.462832 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c9ba-account-create-update-jm7xw" event={"ID":"7d1fbcb0-5eaa-4512-bae5-0759240427f7","Type":"ContainerDied","Data":"3775666ac0f98b390c67fba4a77a6af545c7e26209ecd52e3020ef484537e589"} Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.462868 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3775666ac0f98b390c67fba4a77a6af545c7e26209ecd52e3020ef484537e589" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.462905 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c9ba-account-create-update-jm7xw" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.464619 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" event={"ID":"82ea2a63-302e-4238-b445-98df3d0bac7d","Type":"ContainerDied","Data":"d8016efd74eb7f617475352bc0d1d3f4d260b5448bd6648b62f79d62371ff025"} Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.464706 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8016efd74eb7f617475352bc0d1d3f4d260b5448bd6648b62f79d62371ff025" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.464638 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-68a7-account-create-update-dpgxj" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.469677 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" event={"ID":"086eb35c-13fa-4941-8812-712ca0a53fdc","Type":"ContainerDied","Data":"0ba87a426d6f9df479fb64f2f73f887a32c11c85610195eb13feda6d8cb7112d"} Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.469757 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ba87a426d6f9df479fb64f2f73f887a32c11c85610195eb13feda6d8cb7112d" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.469844 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-openstack-db-create-8pnk2" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.479647 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerStarted","Data":"1bbad29206b064f19ae94b6e8660e2e1b43d375841c23731992346c8000589f0"} Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.481976 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wph7x" event={"ID":"475f0637-3250-46ef-bafa-c3a57c5780a2","Type":"ContainerDied","Data":"89ea0802c54dbe2e519a2f56ce3ef5b0690b7cd5444d8d02401519495c4fc9b4"} Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.482021 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89ea0802c54dbe2e519a2f56ce3ef5b0690b7cd5444d8d02401519495c4fc9b4" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.482086 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wph7x" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.492355 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82ea2a63-302e-4238-b445-98df3d0bac7d-operator-scripts\") pod \"82ea2a63-302e-4238-b445-98df3d0bac7d\" (UID: \"82ea2a63-302e-4238-b445-98df3d0bac7d\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.494096 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j95mz\" (UniqueName: \"kubernetes.io/projected/82ea2a63-302e-4238-b445-98df3d0bac7d-kube-api-access-j95mz\") pod \"82ea2a63-302e-4238-b445-98df3d0bac7d\" (UID: \"82ea2a63-302e-4238-b445-98df3d0bac7d\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.494893 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d1fbcb0-5eaa-4512-bae5-0759240427f7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.494910 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fb89l\" (UniqueName: \"kubernetes.io/projected/c2df55b0-7b7c-4330-8c63-c1fdc708b950-kube-api-access-fb89l\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.494922 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/086eb35c-13fa-4941-8812-712ca0a53fdc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.494931 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mr2n\" (UniqueName: \"kubernetes.io/projected/7d1fbcb0-5eaa-4512-bae5-0759240427f7-kube-api-access-7mr2n\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.494942 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5r9c\" (UniqueName: \"kubernetes.io/projected/086eb35c-13fa-4941-8812-712ca0a53fdc-kube-api-access-t5r9c\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.494954 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w895j\" (UniqueName: \"kubernetes.io/projected/475f0637-3250-46ef-bafa-c3a57c5780a2-kube-api-access-w895j\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 
18:53:09.494963 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/475f0637-3250-46ef-bafa-c3a57c5780a2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.511930 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82ea2a63-302e-4238-b445-98df3d0bac7d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "82ea2a63-302e-4238-b445-98df3d0bac7d" (UID: "82ea2a63-302e-4238-b445-98df3d0bac7d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.517935 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82ea2a63-302e-4238-b445-98df3d0bac7d-kube-api-access-j95mz" (OuterVolumeSpecName: "kube-api-access-j95mz") pod "82ea2a63-302e-4238-b445-98df3d0bac7d" (UID: "82ea2a63-302e-4238-b445-98df3d0bac7d"). InnerVolumeSpecName "kube-api-access-j95mz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.597916 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82ea2a63-302e-4238-b445-98df3d0bac7d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.597946 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j95mz\" (UniqueName: \"kubernetes.io/projected/82ea2a63-302e-4238-b445-98df3d0bac7d-kube-api-access-j95mz\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.623378 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pq64s" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.699234 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wqxr\" (UniqueName: \"kubernetes.io/projected/00b6178d-eeba-450b-b8e6-289bba7db372-kube-api-access-9wqxr\") pod \"00b6178d-eeba-450b-b8e6-289bba7db372\" (UID: \"00b6178d-eeba-450b-b8e6-289bba7db372\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.699567 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00b6178d-eeba-450b-b8e6-289bba7db372-operator-scripts\") pod \"00b6178d-eeba-450b-b8e6-289bba7db372\" (UID: \"00b6178d-eeba-450b-b8e6-289bba7db372\") " Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.700051 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00b6178d-eeba-450b-b8e6-289bba7db372-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "00b6178d-eeba-450b-b8e6-289bba7db372" (UID: "00b6178d-eeba-450b-b8e6-289bba7db372"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.700687 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00b6178d-eeba-450b-b8e6-289bba7db372-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.794656 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00b6178d-eeba-450b-b8e6-289bba7db372-kube-api-access-9wqxr" (OuterVolumeSpecName: "kube-api-access-9wqxr") pod "00b6178d-eeba-450b-b8e6-289bba7db372" (UID: "00b6178d-eeba-450b-b8e6-289bba7db372"). InnerVolumeSpecName "kube-api-access-9wqxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:09 crc kubenswrapper[4935]: I1201 18:53:09.805585 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wqxr\" (UniqueName: \"kubernetes.io/projected/00b6178d-eeba-450b-b8e6-289bba7db372-kube-api-access-9wqxr\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:10 crc kubenswrapper[4935]: I1201 18:53:10.501641 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pq64s" event={"ID":"00b6178d-eeba-450b-b8e6-289bba7db372","Type":"ContainerDied","Data":"454f7dd53dcf39f21c26755a627b59c001adc9f02502346985ada91f626ab96c"} Dec 01 18:53:10 crc kubenswrapper[4935]: I1201 18:53:10.501953 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="454f7dd53dcf39f21c26755a627b59c001adc9f02502346985ada91f626ab96c" Dec 01 18:53:10 crc kubenswrapper[4935]: I1201 18:53:10.502042 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pq64s" Dec 01 18:53:11 crc kubenswrapper[4935]: I1201 18:53:11.917107 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.296680 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g"] Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297241 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffd39947-9d32-4aca-ac65-83d13c6fc3d9" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297262 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffd39947-9d32-4aca-ac65-83d13c6fc3d9" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297294 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2df55b0-7b7c-4330-8c63-c1fdc708b950" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297305 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2df55b0-7b7c-4330-8c63-c1fdc708b950" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297320 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c216b25c-f2ed-423c-8e80-829af72648ae" containerName="dnsmasq-dns" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297332 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c216b25c-f2ed-423c-8e80-829af72648ae" containerName="dnsmasq-dns" Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297362 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00b6178d-eeba-450b-b8e6-289bba7db372" 
containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297374 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="00b6178d-eeba-450b-b8e6-289bba7db372" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297389 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d1fbcb0-5eaa-4512-bae5-0759240427f7" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297400 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d1fbcb0-5eaa-4512-bae5-0759240427f7" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297414 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c216b25c-f2ed-423c-8e80-829af72648ae" containerName="init" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297425 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c216b25c-f2ed-423c-8e80-829af72648ae" containerName="init" Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297440 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba93a6b-9a94-4dc1-8bb2-392863cb64ba" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297448 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba93a6b-9a94-4dc1-8bb2-392863cb64ba" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297464 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82ea2a63-302e-4238-b445-98df3d0bac7d" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297473 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="82ea2a63-302e-4238-b445-98df3d0bac7d" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297486 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="475f0637-3250-46ef-bafa-c3a57c5780a2" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297494 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="475f0637-3250-46ef-bafa-c3a57c5780a2" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: E1201 18:53:12.297510 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="086eb35c-13fa-4941-8812-712ca0a53fdc" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297518 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="086eb35c-13fa-4941-8812-712ca0a53fdc" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297745 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d1fbcb0-5eaa-4512-bae5-0759240427f7" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297777 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="00b6178d-eeba-450b-b8e6-289bba7db372" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297794 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="82ea2a63-302e-4238-b445-98df3d0bac7d" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297808 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="086eb35c-13fa-4941-8812-712ca0a53fdc" 
containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297819 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="475f0637-3250-46ef-bafa-c3a57c5780a2" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297834 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c216b25c-f2ed-423c-8e80-829af72648ae" containerName="dnsmasq-dns" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297850 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffd39947-9d32-4aca-ac65-83d13c6fc3d9" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297862 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2df55b0-7b7c-4330-8c63-c1fdc708b950" containerName="mariadb-database-create" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.297881 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba93a6b-9a94-4dc1-8bb2-392863cb64ba" containerName="mariadb-account-create-update" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.300051 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.316370 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g"] Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.357508 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctdng\" (UniqueName: \"kubernetes.io/projected/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-kube-api-access-ctdng\") pod \"mysqld-exporter-openstack-cell1-db-create-hnl8g\" (UID: \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\") " pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.357552 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-operator-scripts\") pod \"mysqld-exporter-openstack-cell1-db-create-hnl8g\" (UID: \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\") " pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.401805 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-6297-account-create-update-xm8f8"] Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.403114 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.405244 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"mysqld-exporter-openstack-cell1-db-secret" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.420983 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-6297-account-create-update-xm8f8"] Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.459235 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-operator-scripts\") pod \"mysqld-exporter-openstack-cell1-db-create-hnl8g\" (UID: \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\") " pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.459521 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctdng\" (UniqueName: \"kubernetes.io/projected/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-kube-api-access-ctdng\") pod \"mysqld-exporter-openstack-cell1-db-create-hnl8g\" (UID: \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\") " pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.460034 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-operator-scripts\") pod \"mysqld-exporter-openstack-cell1-db-create-hnl8g\" (UID: \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\") " pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.477477 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctdng\" (UniqueName: \"kubernetes.io/projected/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-kube-api-access-ctdng\") pod \"mysqld-exporter-openstack-cell1-db-create-hnl8g\" (UID: \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\") " pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.562158 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db74a42b-e575-4904-8c66-e51d66b66278-operator-scripts\") pod \"mysqld-exporter-6297-account-create-update-xm8f8\" (UID: \"db74a42b-e575-4904-8c66-e51d66b66278\") " pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.562383 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd8lh\" (UniqueName: \"kubernetes.io/projected/db74a42b-e575-4904-8c66-e51d66b66278-kube-api-access-pd8lh\") pod \"mysqld-exporter-6297-account-create-update-xm8f8\" (UID: \"db74a42b-e575-4904-8c66-e51d66b66278\") " pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.630934 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.663652 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd8lh\" (UniqueName: \"kubernetes.io/projected/db74a42b-e575-4904-8c66-e51d66b66278-kube-api-access-pd8lh\") pod \"mysqld-exporter-6297-account-create-update-xm8f8\" (UID: \"db74a42b-e575-4904-8c66-e51d66b66278\") " pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.663839 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db74a42b-e575-4904-8c66-e51d66b66278-operator-scripts\") pod \"mysqld-exporter-6297-account-create-update-xm8f8\" (UID: \"db74a42b-e575-4904-8c66-e51d66b66278\") " pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.667282 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db74a42b-e575-4904-8c66-e51d66b66278-operator-scripts\") pod \"mysqld-exporter-6297-account-create-update-xm8f8\" (UID: \"db74a42b-e575-4904-8c66-e51d66b66278\") " pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.685821 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd8lh\" (UniqueName: \"kubernetes.io/projected/db74a42b-e575-4904-8c66-e51d66b66278-kube-api-access-pd8lh\") pod \"mysqld-exporter-6297-account-create-update-xm8f8\" (UID: \"db74a42b-e575-4904-8c66-e51d66b66278\") " pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:12 crc kubenswrapper[4935]: I1201 18:53:12.727676 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:13 crc kubenswrapper[4935]: I1201 18:53:13.560727 4935 generic.go:334] "Generic (PLEG): container finished" podID="42f244f1-1b31-4831-8f12-f95ef0199c7c" containerID="42ce040c8635cd9251d151775959ab831f10d26eb51c288a5639bb70ab959ef3" exitCode=0 Dec 01 18:53:13 crc kubenswrapper[4935]: I1201 18:53:13.561313 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2v2t8" event={"ID":"42f244f1-1b31-4831-8f12-f95ef0199c7c","Type":"ContainerDied","Data":"42ce040c8635cd9251d151775959ab831f10d26eb51c288a5639bb70ab959ef3"} Dec 01 18:53:13 crc kubenswrapper[4935]: I1201 18:53:13.881409 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g"] Dec 01 18:53:13 crc kubenswrapper[4935]: I1201 18:53:13.899043 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-6297-account-create-update-xm8f8"] Dec 01 18:53:14 crc kubenswrapper[4935]: I1201 18:53:14.595498 4935 generic.go:334] "Generic (PLEG): container finished" podID="e720e2eb-6a00-48ff-aac7-5f6cf40dfb70" containerID="53e71c09878f4a9427e1f81f62e70de9cc6930534633948a164260e3fd210689" exitCode=0 Dec 01 18:53:14 crc kubenswrapper[4935]: I1201 18:53:14.595788 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" event={"ID":"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70","Type":"ContainerDied","Data":"53e71c09878f4a9427e1f81f62e70de9cc6930534633948a164260e3fd210689"} Dec 01 18:53:14 crc kubenswrapper[4935]: I1201 18:53:14.595814 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" event={"ID":"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70","Type":"ContainerStarted","Data":"41a438f3d498fa284271175f7530cfe1d0b636cd9633f9dc7501ff96ab9dae40"} Dec 01 18:53:14 crc kubenswrapper[4935]: I1201 18:53:14.612370 4935 generic.go:334] "Generic (PLEG): container finished" podID="db74a42b-e575-4904-8c66-e51d66b66278" containerID="cfd9426175cec259574b93c4f2860b48e03bc102735a7964cbc86e06d60ffac5" exitCode=0 Dec 01 18:53:14 crc kubenswrapper[4935]: I1201 18:53:14.612445 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" event={"ID":"db74a42b-e575-4904-8c66-e51d66b66278","Type":"ContainerDied","Data":"cfd9426175cec259574b93c4f2860b48e03bc102735a7964cbc86e06d60ffac5"} Dec 01 18:53:14 crc kubenswrapper[4935]: I1201 18:53:14.612475 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" event={"ID":"db74a42b-e575-4904-8c66-e51d66b66278","Type":"ContainerStarted","Data":"a86f5904c7f32b48e2827ddcb65bf6159ae9a4de764944f8865f215f86fec3b1"} Dec 01 18:53:14 crc kubenswrapper[4935]: I1201 18:53:14.650863 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerStarted","Data":"41a4ffdbbfee28a4f12add53b521c93865e82d7f5f460a76817396e7a217a1f1"} Dec 01 18:53:14 crc kubenswrapper[4935]: I1201 18:53:14.723464 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=13.829893225 podStartE2EDuration="1m3.723446711s" podCreationTimestamp="2025-12-01 18:52:11 +0000 UTC" firstStartedPulling="2025-12-01 18:52:23.500848585 +0000 UTC 
m=+1357.522477844" lastFinishedPulling="2025-12-01 18:53:13.394402061 +0000 UTC m=+1407.416031330" observedRunningTime="2025-12-01 18:53:14.717803083 +0000 UTC m=+1408.739432342" watchObservedRunningTime="2025-12-01 18:53:14.723446711 +0000 UTC m=+1408.745075970" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.101732 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.238977 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-swiftconf\") pod \"42f244f1-1b31-4831-8f12-f95ef0199c7c\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.239094 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-combined-ca-bundle\") pod \"42f244f1-1b31-4831-8f12-f95ef0199c7c\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.239187 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-dispersionconf\") pod \"42f244f1-1b31-4831-8f12-f95ef0199c7c\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.239222 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6442\" (UniqueName: \"kubernetes.io/projected/42f244f1-1b31-4831-8f12-f95ef0199c7c-kube-api-access-j6442\") pod \"42f244f1-1b31-4831-8f12-f95ef0199c7c\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.239281 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-scripts\") pod \"42f244f1-1b31-4831-8f12-f95ef0199c7c\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.239324 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/42f244f1-1b31-4831-8f12-f95ef0199c7c-etc-swift\") pod \"42f244f1-1b31-4831-8f12-f95ef0199c7c\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.239392 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-ring-data-devices\") pod \"42f244f1-1b31-4831-8f12-f95ef0199c7c\" (UID: \"42f244f1-1b31-4831-8f12-f95ef0199c7c\") " Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.239959 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "42f244f1-1b31-4831-8f12-f95ef0199c7c" (UID: "42f244f1-1b31-4831-8f12-f95ef0199c7c"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.240313 4935 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.240557 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42f244f1-1b31-4831-8f12-f95ef0199c7c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "42f244f1-1b31-4831-8f12-f95ef0199c7c" (UID: "42f244f1-1b31-4831-8f12-f95ef0199c7c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.244601 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42f244f1-1b31-4831-8f12-f95ef0199c7c-kube-api-access-j6442" (OuterVolumeSpecName: "kube-api-access-j6442") pod "42f244f1-1b31-4831-8f12-f95ef0199c7c" (UID: "42f244f1-1b31-4831-8f12-f95ef0199c7c"). InnerVolumeSpecName "kube-api-access-j6442". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.258808 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "42f244f1-1b31-4831-8f12-f95ef0199c7c" (UID: "42f244f1-1b31-4831-8f12-f95ef0199c7c"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.266136 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "42f244f1-1b31-4831-8f12-f95ef0199c7c" (UID: "42f244f1-1b31-4831-8f12-f95ef0199c7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.268721 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "42f244f1-1b31-4831-8f12-f95ef0199c7c" (UID: "42f244f1-1b31-4831-8f12-f95ef0199c7c"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.286245 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-scripts" (OuterVolumeSpecName: "scripts") pod "42f244f1-1b31-4831-8f12-f95ef0199c7c" (UID: "42f244f1-1b31-4831-8f12-f95ef0199c7c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.344737 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42f244f1-1b31-4831-8f12-f95ef0199c7c-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.344784 4935 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/42f244f1-1b31-4831-8f12-f95ef0199c7c-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.344800 4935 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.344832 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.344847 4935 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/42f244f1-1b31-4831-8f12-f95ef0199c7c-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.344858 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6442\" (UniqueName: \"kubernetes.io/projected/42f244f1-1b31-4831-8f12-f95ef0199c7c-kube-api-access-j6442\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.530552 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-d4ntp"] Dec 01 18:53:15 crc kubenswrapper[4935]: E1201 18:53:15.531541 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f244f1-1b31-4831-8f12-f95ef0199c7c" containerName="swift-ring-rebalance" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.531612 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f244f1-1b31-4831-8f12-f95ef0199c7c" containerName="swift-ring-rebalance" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.531860 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="42f244f1-1b31-4831-8f12-f95ef0199c7c" containerName="swift-ring-rebalance" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.532579 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.534551 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-kv6rd" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.534956 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.545837 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-d4ntp"] Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.652051 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-db-sync-config-data\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.652453 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-combined-ca-bundle\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.652654 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8scb\" (UniqueName: \"kubernetes.io/projected/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-kube-api-access-n8scb\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.652679 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-config-data\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.659124 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2v2t8" event={"ID":"42f244f1-1b31-4831-8f12-f95ef0199c7c","Type":"ContainerDied","Data":"df3fe67126854f7470257339f8ff9654d766f93b50951e9cf59c772d0dca8719"} Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.659357 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df3fe67126854f7470257339f8ff9654d766f93b50951e9cf59c772d0dca8719" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.659164 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-2v2t8" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.660727 4935 generic.go:334] "Generic (PLEG): container finished" podID="8bd64079-678d-43de-aeb6-6818338d5997" containerID="0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a" exitCode=0 Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.660803 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8bd64079-678d-43de-aeb6-6818338d5997","Type":"ContainerDied","Data":"0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a"} Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.792685 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8scb\" (UniqueName: \"kubernetes.io/projected/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-kube-api-access-n8scb\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.792732 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-config-data\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.792849 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-db-sync-config-data\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.793033 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-combined-ca-bundle\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.798037 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-combined-ca-bundle\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.799241 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-db-sync-config-data\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.799371 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-config-data\") pod \"glance-db-sync-d4ntp\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.820165 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8scb\" (UniqueName: \"kubernetes.io/projected/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-kube-api-access-n8scb\") pod \"glance-db-sync-d4ntp\" (UID: 
\"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:15 crc kubenswrapper[4935]: I1201 18:53:15.901601 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-d4ntp" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.265948 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.270911 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.405930 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-operator-scripts\") pod \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\" (UID: \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\") " Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.406119 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db74a42b-e575-4904-8c66-e51d66b66278-operator-scripts\") pod \"db74a42b-e575-4904-8c66-e51d66b66278\" (UID: \"db74a42b-e575-4904-8c66-e51d66b66278\") " Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.406214 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd8lh\" (UniqueName: \"kubernetes.io/projected/db74a42b-e575-4904-8c66-e51d66b66278-kube-api-access-pd8lh\") pod \"db74a42b-e575-4904-8c66-e51d66b66278\" (UID: \"db74a42b-e575-4904-8c66-e51d66b66278\") " Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.406336 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctdng\" (UniqueName: \"kubernetes.io/projected/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-kube-api-access-ctdng\") pod \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\" (UID: \"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70\") " Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.411082 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db74a42b-e575-4904-8c66-e51d66b66278-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "db74a42b-e575-4904-8c66-e51d66b66278" (UID: "db74a42b-e575-4904-8c66-e51d66b66278"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.411125 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e720e2eb-6a00-48ff-aac7-5f6cf40dfb70" (UID: "e720e2eb-6a00-48ff-aac7-5f6cf40dfb70"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.414691 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db74a42b-e575-4904-8c66-e51d66b66278-kube-api-access-pd8lh" (OuterVolumeSpecName: "kube-api-access-pd8lh") pod "db74a42b-e575-4904-8c66-e51d66b66278" (UID: "db74a42b-e575-4904-8c66-e51d66b66278"). InnerVolumeSpecName "kube-api-access-pd8lh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.415416 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-kube-api-access-ctdng" (OuterVolumeSpecName: "kube-api-access-ctdng") pod "e720e2eb-6a00-48ff-aac7-5f6cf40dfb70" (UID: "e720e2eb-6a00-48ff-aac7-5f6cf40dfb70"). InnerVolumeSpecName "kube-api-access-ctdng". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.512200 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctdng\" (UniqueName: \"kubernetes.io/projected/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-kube-api-access-ctdng\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.512235 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.512245 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db74a42b-e575-4904-8c66-e51d66b66278-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.512254 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd8lh\" (UniqueName: \"kubernetes.io/projected/db74a42b-e575-4904-8c66-e51d66b66278-kube-api-access-pd8lh\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.529673 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-d4ntp"] Dec 01 18:53:16 crc kubenswrapper[4935]: W1201 18:53:16.580778 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a7ac48a_042f_4d13_a9ac_d8449e732bbf.slice/crio-533a054e887b2c51e54f1df125742c43f1355eb6f20c9e2400bea75c77e87fa5 WatchSource:0}: Error finding container 533a054e887b2c51e54f1df125742c43f1355eb6f20c9e2400bea75c77e87fa5: Status 404 returned error can't find the container with id 533a054e887b2c51e54f1df125742c43f1355eb6f20c9e2400bea75c77e87fa5 Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.669835 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d4ntp" event={"ID":"1a7ac48a-042f-4d13-a9ac-d8449e732bbf","Type":"ContainerStarted","Data":"533a054e887b2c51e54f1df125742c43f1355eb6f20c9e2400bea75c77e87fa5"} Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.672057 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8bd64079-678d-43de-aeb6-6818338d5997","Type":"ContainerStarted","Data":"0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d"} Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.672327 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.673518 4935 generic.go:334] "Generic (PLEG): container finished" podID="fce93449-11d7-490f-9456-8f8667b9cb6d" containerID="4436d0a1aa0b94abbc6edba1f66766705f930f3895beeda29b89c6f73d2deabb" exitCode=0 Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.673635 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"fce93449-11d7-490f-9456-8f8667b9cb6d","Type":"ContainerDied","Data":"4436d0a1aa0b94abbc6edba1f66766705f930f3895beeda29b89c6f73d2deabb"} Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.675571 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" event={"ID":"e720e2eb-6a00-48ff-aac7-5f6cf40dfb70","Type":"ContainerDied","Data":"41a438f3d498fa284271175f7530cfe1d0b636cd9633f9dc7501ff96ab9dae40"} Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.675670 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41a438f3d498fa284271175f7530cfe1d0b636cd9633f9dc7501ff96ab9dae40" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.675617 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.680355 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" event={"ID":"db74a42b-e575-4904-8c66-e51d66b66278","Type":"ContainerDied","Data":"a86f5904c7f32b48e2827ddcb65bf6159ae9a4de764944f8865f215f86fec3b1"} Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.680385 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a86f5904c7f32b48e2827ddcb65bf6159ae9a4de764944f8865f215f86fec3b1" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.680399 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-6297-account-create-update-xm8f8" Dec 01 18:53:16 crc kubenswrapper[4935]: I1201 18:53:16.704178 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=61.919574087 podStartE2EDuration="1m11.704159134s" podCreationTimestamp="2025-12-01 18:52:05 +0000 UTC" firstStartedPulling="2025-12-01 18:52:23.454420758 +0000 UTC m=+1357.476050017" lastFinishedPulling="2025-12-01 18:52:33.239005795 +0000 UTC m=+1367.260635064" observedRunningTime="2025-12-01 18:53:16.699415364 +0000 UTC m=+1410.721044623" watchObservedRunningTime="2025-12-01 18:53:16.704159134 +0000 UTC m=+1410.725788393" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.560298 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-0"] Dec 01 18:53:17 crc kubenswrapper[4935]: E1201 18:53:17.560969 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e720e2eb-6a00-48ff-aac7-5f6cf40dfb70" containerName="mariadb-database-create" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.560986 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e720e2eb-6a00-48ff-aac7-5f6cf40dfb70" containerName="mariadb-database-create" Dec 01 18:53:17 crc kubenswrapper[4935]: E1201 18:53:17.561016 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db74a42b-e575-4904-8c66-e51d66b66278" containerName="mariadb-account-create-update" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.561023 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="db74a42b-e575-4904-8c66-e51d66b66278" containerName="mariadb-account-create-update" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.561225 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e720e2eb-6a00-48ff-aac7-5f6cf40dfb70" containerName="mariadb-database-create" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.561247 4935 
memory_manager.go:354] "RemoveStaleState removing state" podUID="db74a42b-e575-4904-8c66-e51d66b66278" containerName="mariadb-account-create-update" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.561908 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.567648 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"mysqld-exporter-config-data" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.575742 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-0"] Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.641970 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g7lj\" (UniqueName: \"kubernetes.io/projected/4c367332-667b-47f3-b2e6-1a4c759ca7e8-kube-api-access-4g7lj\") pod \"mysqld-exporter-0\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.642101 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.642664 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-config-data\") pod \"mysqld-exporter-0\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.694096 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"fce93449-11d7-490f-9456-8f8667b9cb6d","Type":"ContainerStarted","Data":"a6b083bbcbc548037c8291d97d5dd01282c2a97b1d1b9a9866a7df79c9eed3e9"} Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.694888 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.730467 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=56.504853646 podStartE2EDuration="1m12.730444846s" podCreationTimestamp="2025-12-01 18:52:05 +0000 UTC" firstStartedPulling="2025-12-01 18:52:24.21360702 +0000 UTC m=+1358.235236279" lastFinishedPulling="2025-12-01 18:52:40.43919822 +0000 UTC m=+1374.460827479" observedRunningTime="2025-12-01 18:53:17.729272309 +0000 UTC m=+1411.750901558" watchObservedRunningTime="2025-12-01 18:53:17.730444846 +0000 UTC m=+1411.752074105" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.745233 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-config-data\") pod \"mysqld-exporter-0\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.745397 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g7lj\" (UniqueName: \"kubernetes.io/projected/4c367332-667b-47f3-b2e6-1a4c759ca7e8-kube-api-access-4g7lj\") pod 
\"mysqld-exporter-0\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.745457 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.753510 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.767740 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-config-data\") pod \"mysqld-exporter-0\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.791642 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g7lj\" (UniqueName: \"kubernetes.io/projected/4c367332-667b-47f3-b2e6-1a4c759ca7e8-kube-api-access-4g7lj\") pod \"mysqld-exporter-0\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.903738 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-0" Dec 01 18:53:17 crc kubenswrapper[4935]: I1201 18:53:17.934477 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-7twr7" podUID="3798fbe5-306b-43f9-8f1f-ddc928996f88" containerName="ovn-controller" probeResult="failure" output=< Dec 01 18:53:17 crc kubenswrapper[4935]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 18:53:17 crc kubenswrapper[4935]: > Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.063981 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.076275 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-zxbb9" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.210862 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.333223 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-7twr7-config-lgkdp"] Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.334621 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.340708 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.354560 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7twr7-config-lgkdp"] Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.488168 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-0"] Dec 01 18:53:18 crc kubenswrapper[4935]: W1201 18:53:18.507046 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c367332_667b_47f3_b2e6_1a4c759ca7e8.slice/crio-a4784305bddedd5bd83fec2095e87e546ad663f0f7888de57212d1061b045431 WatchSource:0}: Error finding container a4784305bddedd5bd83fec2095e87e546ad663f0f7888de57212d1061b045431: Status 404 returned error can't find the container with id a4784305bddedd5bd83fec2095e87e546ad663f0f7888de57212d1061b045431 Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.516679 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-scripts\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.516765 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-log-ovn\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.516818 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckb4m\" (UniqueName: \"kubernetes.io/projected/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-kube-api-access-ckb4m\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.516848 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-additional-scripts\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.516897 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run-ovn\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.516938 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " 
pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.619877 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run-ovn\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.619986 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.620299 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-scripts\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.620401 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-log-ovn\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.620452 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckb4m\" (UniqueName: \"kubernetes.io/projected/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-kube-api-access-ckb4m\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.620477 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-additional-scripts\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.620957 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.621221 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-additional-scripts\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.620297 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run-ovn\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " 
pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.622028 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-log-ovn\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.622387 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-scripts\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.646740 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckb4m\" (UniqueName: \"kubernetes.io/projected/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-kube-api-access-ckb4m\") pod \"ovn-controller-7twr7-config-lgkdp\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.669263 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:18 crc kubenswrapper[4935]: I1201 18:53:18.721232 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"4c367332-667b-47f3-b2e6-1a4c759ca7e8","Type":"ContainerStarted","Data":"a4784305bddedd5bd83fec2095e87e546ad663f0f7888de57212d1061b045431"} Dec 01 18:53:19 crc kubenswrapper[4935]: I1201 18:53:19.287566 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-7twr7-config-lgkdp"] Dec 01 18:53:19 crc kubenswrapper[4935]: W1201 18:53:19.302461 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4dcbaffb_0fde_418d_80c9_3e23cf779ca3.slice/crio-3f4c45445c0068e1591f0ccd35931ddb7be43a169f85bdb3387d9d4df5338b6a WatchSource:0}: Error finding container 3f4c45445c0068e1591f0ccd35931ddb7be43a169f85bdb3387d9d4df5338b6a: Status 404 returned error can't find the container with id 3f4c45445c0068e1591f0ccd35931ddb7be43a169f85bdb3387d9d4df5338b6a Dec 01 18:53:19 crc kubenswrapper[4935]: I1201 18:53:19.731347 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7twr7-config-lgkdp" event={"ID":"4dcbaffb-0fde-418d-80c9-3e23cf779ca3","Type":"ContainerStarted","Data":"2ad41ceccfefd9b117755b9f76eb8900954ae87e9d3d2ef4ffdb6a4ec7e67b24"} Dec 01 18:53:19 crc kubenswrapper[4935]: I1201 18:53:19.731747 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7twr7-config-lgkdp" event={"ID":"4dcbaffb-0fde-418d-80c9-3e23cf779ca3","Type":"ContainerStarted","Data":"3f4c45445c0068e1591f0ccd35931ddb7be43a169f85bdb3387d9d4df5338b6a"} Dec 01 18:53:19 crc kubenswrapper[4935]: I1201 18:53:19.760930 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-7twr7-config-lgkdp" podStartSLOduration=1.760901591 podStartE2EDuration="1.760901591s" podCreationTimestamp="2025-12-01 18:53:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:19.747351553 +0000 UTC m=+1413.768980812" 
watchObservedRunningTime="2025-12-01 18:53:19.760901591 +0000 UTC m=+1413.782530850" Dec 01 18:53:20 crc kubenswrapper[4935]: I1201 18:53:20.745780 4935 generic.go:334] "Generic (PLEG): container finished" podID="4dcbaffb-0fde-418d-80c9-3e23cf779ca3" containerID="2ad41ceccfefd9b117755b9f76eb8900954ae87e9d3d2ef4ffdb6a4ec7e67b24" exitCode=0 Dec 01 18:53:20 crc kubenswrapper[4935]: I1201 18:53:20.745833 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7twr7-config-lgkdp" event={"ID":"4dcbaffb-0fde-418d-80c9-3e23cf779ca3","Type":"ContainerDied","Data":"2ad41ceccfefd9b117755b9f76eb8900954ae87e9d3d2ef4ffdb6a4ec7e67b24"} Dec 01 18:53:20 crc kubenswrapper[4935]: I1201 18:53:20.770748 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mysqld-exporter-0" podStartSLOduration=1.8139921380000001 podStartE2EDuration="3.770731064s" podCreationTimestamp="2025-12-01 18:53:17 +0000 UTC" firstStartedPulling="2025-12-01 18:53:18.523468616 +0000 UTC m=+1412.545097875" lastFinishedPulling="2025-12-01 18:53:20.480207502 +0000 UTC m=+1414.501836801" observedRunningTime="2025-12-01 18:53:20.768451401 +0000 UTC m=+1414.790080660" watchObservedRunningTime="2025-12-01 18:53:20.770731064 +0000 UTC m=+1414.792360323" Dec 01 18:53:21 crc kubenswrapper[4935]: I1201 18:53:21.761283 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"4c367332-667b-47f3-b2e6-1a4c759ca7e8","Type":"ContainerStarted","Data":"10dc53b9705f3edc7df6dc900b5169b4c47cca40d7d7aa441b4e6ed62a5a2140"} Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.161566 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.232314 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-swfxc"] Dec 01 18:53:22 crc kubenswrapper[4935]: E1201 18:53:22.232806 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dcbaffb-0fde-418d-80c9-3e23cf779ca3" containerName="ovn-config" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.232824 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dcbaffb-0fde-418d-80c9-3e23cf779ca3" containerName="ovn-config" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.233029 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dcbaffb-0fde-418d-80c9-3e23cf779ca3" containerName="ovn-config" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.234426 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.236607 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-scripts\") pod \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.237794 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-scripts" (OuterVolumeSpecName: "scripts") pod "4dcbaffb-0fde-418d-80c9-3e23cf779ca3" (UID: "4dcbaffb-0fde-418d-80c9-3e23cf779ca3"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.237919 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckb4m\" (UniqueName: \"kubernetes.io/projected/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-kube-api-access-ckb4m\") pod \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.238723 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run\") pod \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.238927 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-additional-scripts\") pod \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.238795 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run" (OuterVolumeSpecName: "var-run") pod "4dcbaffb-0fde-418d-80c9-3e23cf779ca3" (UID: "4dcbaffb-0fde-418d-80c9-3e23cf779ca3"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.238982 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run-ovn\") pod \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.239829 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "4dcbaffb-0fde-418d-80c9-3e23cf779ca3" (UID: "4dcbaffb-0fde-418d-80c9-3e23cf779ca3"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.239880 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "4dcbaffb-0fde-418d-80c9-3e23cf779ca3" (UID: "4dcbaffb-0fde-418d-80c9-3e23cf779ca3"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.240016 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-log-ovn\") pod \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\" (UID: \"4dcbaffb-0fde-418d-80c9-3e23cf779ca3\") " Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.240082 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "4dcbaffb-0fde-418d-80c9-3e23cf779ca3" (UID: "4dcbaffb-0fde-418d-80c9-3e23cf779ca3"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.240674 4935 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.240714 4935 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.240726 4935 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.240734 4935 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.240742 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.245861 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-swfxc"] Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.272341 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-kube-api-access-ckb4m" (OuterVolumeSpecName: "kube-api-access-ckb4m") pod "4dcbaffb-0fde-418d-80c9-3e23cf779ca3" (UID: "4dcbaffb-0fde-418d-80c9-3e23cf779ca3"). InnerVolumeSpecName "kube-api-access-ckb4m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.342896 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-utilities\") pod \"redhat-marketplace-swfxc\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.343044 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-catalog-content\") pod \"redhat-marketplace-swfxc\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.343227 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58jxt\" (UniqueName: \"kubernetes.io/projected/4336b794-da9d-464c-87f0-e22f1041f630-kube-api-access-58jxt\") pod \"redhat-marketplace-swfxc\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.343362 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckb4m\" (UniqueName: \"kubernetes.io/projected/4dcbaffb-0fde-418d-80c9-3e23cf779ca3-kube-api-access-ckb4m\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.444418 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-utilities\") pod \"redhat-marketplace-swfxc\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.444498 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-catalog-content\") pod \"redhat-marketplace-swfxc\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.444596 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58jxt\" (UniqueName: \"kubernetes.io/projected/4336b794-da9d-464c-87f0-e22f1041f630-kube-api-access-58jxt\") pod \"redhat-marketplace-swfxc\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.444880 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-catalog-content\") pod \"redhat-marketplace-swfxc\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.445139 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-utilities\") pod \"redhat-marketplace-swfxc\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc 
kubenswrapper[4935]: I1201 18:53:22.476324 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58jxt\" (UniqueName: \"kubernetes.io/projected/4336b794-da9d-464c-87f0-e22f1041f630-kube-api-access-58jxt\") pod \"redhat-marketplace-swfxc\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.590583 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.799434 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-7twr7-config-lgkdp" event={"ID":"4dcbaffb-0fde-418d-80c9-3e23cf779ca3","Type":"ContainerDied","Data":"3f4c45445c0068e1591f0ccd35931ddb7be43a169f85bdb3387d9d4df5338b6a"} Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.799906 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f4c45445c0068e1591f0ccd35931ddb7be43a169f85bdb3387d9d4df5338b6a" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.808355 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-7twr7-config-lgkdp" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.903185 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-7twr7-config-lgkdp"] Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.913837 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-7twr7-config-lgkdp"] Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.931382 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-7twr7" Dec 01 18:53:22 crc kubenswrapper[4935]: I1201 18:53:22.983251 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-swfxc"] Dec 01 18:53:23 crc kubenswrapper[4935]: I1201 18:53:23.821019 4935 generic.go:334] "Generic (PLEG): container finished" podID="4336b794-da9d-464c-87f0-e22f1041f630" containerID="a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64" exitCode=0 Dec 01 18:53:23 crc kubenswrapper[4935]: I1201 18:53:23.821237 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-swfxc" event={"ID":"4336b794-da9d-464c-87f0-e22f1041f630","Type":"ContainerDied","Data":"a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64"} Dec 01 18:53:23 crc kubenswrapper[4935]: I1201 18:53:23.821616 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-swfxc" event={"ID":"4336b794-da9d-464c-87f0-e22f1041f630","Type":"ContainerStarted","Data":"d05669b5ecdba31f6469b87cdf0083ad37ada6babd0fecb316c12b4eb0e36d86"} Dec 01 18:53:24 crc kubenswrapper[4935]: I1201 18:53:24.522250 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dcbaffb-0fde-418d-80c9-3e23cf779ca3" path="/var/lib/kubelet/pods/4dcbaffb-0fde-418d-80c9-3e23cf779ca3/volumes" Dec 01 18:53:25 crc kubenswrapper[4935]: I1201 18:53:25.008784 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:53:25 crc kubenswrapper[4935]: I1201 18:53:25.018029 4935 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0ee2844-1713-4b15-81f5-138cbc14fe03-etc-swift\") pod \"swift-storage-0\" (UID: \"e0ee2844-1713-4b15-81f5-138cbc14fe03\") " pod="openstack/swift-storage-0" Dec 01 18:53:25 crc kubenswrapper[4935]: I1201 18:53:25.251858 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 01 18:53:25 crc kubenswrapper[4935]: I1201 18:53:25.867475 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.414342 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.791333 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.823842 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-74w75"] Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.825178 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-74w75" Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.849662 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-74w75"] Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.956381 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-20d7-account-create-update-bgg6d"] Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.981467 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.984432 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.985054 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-operator-scripts\") pod \"barbican-db-create-74w75\" (UID: \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\") " pod="openstack/barbican-db-create-74w75" Dec 01 18:53:26 crc kubenswrapper[4935]: I1201 18:53:26.985121 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgbrm\" (UniqueName: \"kubernetes.io/projected/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-kube-api-access-dgbrm\") pod \"barbican-db-create-74w75\" (UID: \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\") " pod="openstack/barbican-db-create-74w75" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.024247 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-d7l9s"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.037501 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-20d7-account-create-update-bgg6d"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.037628 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.060197 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-d7l9s"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.097801 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-operator-scripts\") pod \"barbican-db-create-74w75\" (UID: \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\") " pod="openstack/barbican-db-create-74w75" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.098084 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgbrm\" (UniqueName: \"kubernetes.io/projected/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-kube-api-access-dgbrm\") pod \"barbican-db-create-74w75\" (UID: \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\") " pod="openstack/barbican-db-create-74w75" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.098216 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtfsx\" (UniqueName: \"kubernetes.io/projected/b468f617-1ac4-4187-a32a-a35e87881f70-kube-api-access-xtfsx\") pod \"cinder-db-create-d7l9s\" (UID: \"b468f617-1ac4-4187-a32a-a35e87881f70\") " pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.098378 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6318f430-920a-42fa-82fa-3543844bb06a-operator-scripts\") pod \"heat-20d7-account-create-update-bgg6d\" (UID: \"6318f430-920a-42fa-82fa-3543844bb06a\") " pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.098462 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b468f617-1ac4-4187-a32a-a35e87881f70-operator-scripts\") pod \"cinder-db-create-d7l9s\" (UID: \"b468f617-1ac4-4187-a32a-a35e87881f70\") " pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.098542 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggwsk\" (UniqueName: \"kubernetes.io/projected/6318f430-920a-42fa-82fa-3543844bb06a-kube-api-access-ggwsk\") pod \"heat-20d7-account-create-update-bgg6d\" (UID: \"6318f430-920a-42fa-82fa-3543844bb06a\") " pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.099252 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-operator-scripts\") pod \"barbican-db-create-74w75\" (UID: \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\") " pod="openstack/barbican-db-create-74w75" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.136002 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-f9lxk"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.165415 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-ec83-account-create-update-n6hqd"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.166313 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.167252 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.171925 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.176654 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-f9lxk"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.193027 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-ec83-account-create-update-n6hqd"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.201002 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/350567c5-3a10-4bd8-b57c-f69aa1b581bc-operator-scripts\") pod \"barbican-ec83-account-create-update-n6hqd\" (UID: \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\") " pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.201059 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvc4z\" (UniqueName: \"kubernetes.io/projected/709a21ee-5142-492b-9b88-6f39cb92473d-kube-api-access-fvc4z\") pod \"heat-db-create-f9lxk\" (UID: \"709a21ee-5142-492b-9b88-6f39cb92473d\") " pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.201118 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6318f430-920a-42fa-82fa-3543844bb06a-operator-scripts\") pod \"heat-20d7-account-create-update-bgg6d\" (UID: \"6318f430-920a-42fa-82fa-3543844bb06a\") " pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.201179 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b468f617-1ac4-4187-a32a-a35e87881f70-operator-scripts\") pod \"cinder-db-create-d7l9s\" (UID: \"b468f617-1ac4-4187-a32a-a35e87881f70\") " pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.201216 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggwsk\" (UniqueName: \"kubernetes.io/projected/6318f430-920a-42fa-82fa-3543844bb06a-kube-api-access-ggwsk\") pod \"heat-20d7-account-create-update-bgg6d\" (UID: \"6318f430-920a-42fa-82fa-3543844bb06a\") " pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.201334 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq24q\" (UniqueName: \"kubernetes.io/projected/350567c5-3a10-4bd8-b57c-f69aa1b581bc-kube-api-access-fq24q\") pod \"barbican-ec83-account-create-update-n6hqd\" (UID: \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\") " pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.201375 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtfsx\" (UniqueName: \"kubernetes.io/projected/b468f617-1ac4-4187-a32a-a35e87881f70-kube-api-access-xtfsx\") pod 
\"cinder-db-create-d7l9s\" (UID: \"b468f617-1ac4-4187-a32a-a35e87881f70\") " pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.201421 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/709a21ee-5142-492b-9b88-6f39cb92473d-operator-scripts\") pod \"heat-db-create-f9lxk\" (UID: \"709a21ee-5142-492b-9b88-6f39cb92473d\") " pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.202111 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6318f430-920a-42fa-82fa-3543844bb06a-operator-scripts\") pod \"heat-20d7-account-create-update-bgg6d\" (UID: \"6318f430-920a-42fa-82fa-3543844bb06a\") " pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.203662 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b468f617-1ac4-4187-a32a-a35e87881f70-operator-scripts\") pod \"cinder-db-create-d7l9s\" (UID: \"b468f617-1ac4-4187-a32a-a35e87881f70\") " pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.213922 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgbrm\" (UniqueName: \"kubernetes.io/projected/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-kube-api-access-dgbrm\") pod \"barbican-db-create-74w75\" (UID: \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\") " pod="openstack/barbican-db-create-74w75" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.221222 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-lmhvh"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.222701 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.224457 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.227056 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.227359 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9jnc7" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.229480 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.244845 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtfsx\" (UniqueName: \"kubernetes.io/projected/b468f617-1ac4-4187-a32a-a35e87881f70-kube-api-access-xtfsx\") pod \"cinder-db-create-d7l9s\" (UID: \"b468f617-1ac4-4187-a32a-a35e87881f70\") " pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.246353 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggwsk\" (UniqueName: \"kubernetes.io/projected/6318f430-920a-42fa-82fa-3543844bb06a-kube-api-access-ggwsk\") pod \"heat-20d7-account-create-update-bgg6d\" (UID: \"6318f430-920a-42fa-82fa-3543844bb06a\") " pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.257986 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-lmhvh"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.312350 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvc4z\" (UniqueName: \"kubernetes.io/projected/709a21ee-5142-492b-9b88-6f39cb92473d-kube-api-access-fvc4z\") pod \"heat-db-create-f9lxk\" (UID: \"709a21ee-5142-492b-9b88-6f39cb92473d\") " pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.312418 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-config-data\") pod \"keystone-db-sync-lmhvh\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.312457 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncpsg\" (UniqueName: \"kubernetes.io/projected/7e086eca-2e25-4bf7-9d95-807b71ab8945-kube-api-access-ncpsg\") pod \"keystone-db-sync-lmhvh\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.312494 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-combined-ca-bundle\") pod \"keystone-db-sync-lmhvh\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.312541 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq24q\" (UniqueName: \"kubernetes.io/projected/350567c5-3a10-4bd8-b57c-f69aa1b581bc-kube-api-access-fq24q\") pod 
\"barbican-ec83-account-create-update-n6hqd\" (UID: \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\") " pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.312772 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/709a21ee-5142-492b-9b88-6f39cb92473d-operator-scripts\") pod \"heat-db-create-f9lxk\" (UID: \"709a21ee-5142-492b-9b88-6f39cb92473d\") " pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.312886 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/350567c5-3a10-4bd8-b57c-f69aa1b581bc-operator-scripts\") pod \"barbican-ec83-account-create-update-n6hqd\" (UID: \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\") " pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.313510 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/709a21ee-5142-492b-9b88-6f39cb92473d-operator-scripts\") pod \"heat-db-create-f9lxk\" (UID: \"709a21ee-5142-492b-9b88-6f39cb92473d\") " pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.313852 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/350567c5-3a10-4bd8-b57c-f69aa1b581bc-operator-scripts\") pod \"barbican-ec83-account-create-update-n6hqd\" (UID: \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\") " pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.316358 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.341364 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvc4z\" (UniqueName: \"kubernetes.io/projected/709a21ee-5142-492b-9b88-6f39cb92473d-kube-api-access-fvc4z\") pod \"heat-db-create-f9lxk\" (UID: \"709a21ee-5142-492b-9b88-6f39cb92473d\") " pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.348366 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.359368 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-c7da-account-create-update-hprpg"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.360745 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.363777 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq24q\" (UniqueName: \"kubernetes.io/projected/350567c5-3a10-4bd8-b57c-f69aa1b581bc-kube-api-access-fq24q\") pod \"barbican-ec83-account-create-update-n6hqd\" (UID: \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\") " pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.365472 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.390344 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c7da-account-create-update-hprpg"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.414822 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16f3dbdb-7bea-419d-accc-89a7f288c977-operator-scripts\") pod \"cinder-c7da-account-create-update-hprpg\" (UID: \"16f3dbdb-7bea-419d-accc-89a7f288c977\") " pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.414974 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-config-data\") pod \"keystone-db-sync-lmhvh\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.415008 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b8m8\" (UniqueName: \"kubernetes.io/projected/16f3dbdb-7bea-419d-accc-89a7f288c977-kube-api-access-2b8m8\") pod \"cinder-c7da-account-create-update-hprpg\" (UID: \"16f3dbdb-7bea-419d-accc-89a7f288c977\") " pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.415045 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncpsg\" (UniqueName: \"kubernetes.io/projected/7e086eca-2e25-4bf7-9d95-807b71ab8945-kube-api-access-ncpsg\") pod \"keystone-db-sync-lmhvh\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.415087 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-combined-ca-bundle\") pod \"keystone-db-sync-lmhvh\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.419095 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-combined-ca-bundle\") pod \"keystone-db-sync-lmhvh\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.421604 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-config-data\") pod \"keystone-db-sync-lmhvh\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " 
pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.430859 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.449440 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncpsg\" (UniqueName: \"kubernetes.io/projected/7e086eca-2e25-4bf7-9d95-807b71ab8945-kube-api-access-ncpsg\") pod \"keystone-db-sync-lmhvh\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.450666 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.501801 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-74w75" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.517298 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16f3dbdb-7bea-419d-accc-89a7f288c977-operator-scripts\") pod \"cinder-c7da-account-create-update-hprpg\" (UID: \"16f3dbdb-7bea-419d-accc-89a7f288c977\") " pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.517711 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b8m8\" (UniqueName: \"kubernetes.io/projected/16f3dbdb-7bea-419d-accc-89a7f288c977-kube-api-access-2b8m8\") pod \"cinder-c7da-account-create-update-hprpg\" (UID: \"16f3dbdb-7bea-419d-accc-89a7f288c977\") " pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.518171 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16f3dbdb-7bea-419d-accc-89a7f288c977-operator-scripts\") pod \"cinder-c7da-account-create-update-hprpg\" (UID: \"16f3dbdb-7bea-419d-accc-89a7f288c977\") " pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.539724 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b8m8\" (UniqueName: \"kubernetes.io/projected/16f3dbdb-7bea-419d-accc-89a7f288c977-kube-api-access-2b8m8\") pod \"cinder-c7da-account-create-update-hprpg\" (UID: \"16f3dbdb-7bea-419d-accc-89a7f288c977\") " pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.539796 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-bwt89"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.541342 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.564069 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-bwt89"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.620631 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfmtg\" (UniqueName: \"kubernetes.io/projected/1d4835b9-5a99-43d2-90ef-4beafe03afa7-kube-api-access-gfmtg\") pod \"neutron-db-create-bwt89\" (UID: \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\") " pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.620795 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d4835b9-5a99-43d2-90ef-4beafe03afa7-operator-scripts\") pod \"neutron-db-create-bwt89\" (UID: \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\") " pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.632568 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dedb-account-create-update-dhdlw"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.633842 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.638303 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.641807 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.652123 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dedb-account-create-update-dhdlw"] Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.721426 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxxvd\" (UniqueName: \"kubernetes.io/projected/dcb37c05-2790-49a1-ab92-5301bd8cb642-kube-api-access-lxxvd\") pod \"neutron-dedb-account-create-update-dhdlw\" (UID: \"dcb37c05-2790-49a1-ab92-5301bd8cb642\") " pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.721471 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfmtg\" (UniqueName: \"kubernetes.io/projected/1d4835b9-5a99-43d2-90ef-4beafe03afa7-kube-api-access-gfmtg\") pod \"neutron-db-create-bwt89\" (UID: \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\") " pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.721586 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d4835b9-5a99-43d2-90ef-4beafe03afa7-operator-scripts\") pod \"neutron-db-create-bwt89\" (UID: \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\") " pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.722329 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d4835b9-5a99-43d2-90ef-4beafe03afa7-operator-scripts\") pod \"neutron-db-create-bwt89\" (UID: \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\") " pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:27 crc 
kubenswrapper[4935]: I1201 18:53:27.721627 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcb37c05-2790-49a1-ab92-5301bd8cb642-operator-scripts\") pod \"neutron-dedb-account-create-update-dhdlw\" (UID: \"dcb37c05-2790-49a1-ab92-5301bd8cb642\") " pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.737563 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfmtg\" (UniqueName: \"kubernetes.io/projected/1d4835b9-5a99-43d2-90ef-4beafe03afa7-kube-api-access-gfmtg\") pod \"neutron-db-create-bwt89\" (UID: \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\") " pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.758004 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.823104 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxxvd\" (UniqueName: \"kubernetes.io/projected/dcb37c05-2790-49a1-ab92-5301bd8cb642-kube-api-access-lxxvd\") pod \"neutron-dedb-account-create-update-dhdlw\" (UID: \"dcb37c05-2790-49a1-ab92-5301bd8cb642\") " pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.823251 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcb37c05-2790-49a1-ab92-5301bd8cb642-operator-scripts\") pod \"neutron-dedb-account-create-update-dhdlw\" (UID: \"dcb37c05-2790-49a1-ab92-5301bd8cb642\") " pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.823930 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcb37c05-2790-49a1-ab92-5301bd8cb642-operator-scripts\") pod \"neutron-dedb-account-create-update-dhdlw\" (UID: \"dcb37c05-2790-49a1-ab92-5301bd8cb642\") " pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.841038 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxxvd\" (UniqueName: \"kubernetes.io/projected/dcb37c05-2790-49a1-ab92-5301bd8cb642-kube-api-access-lxxvd\") pod \"neutron-dedb-account-create-update-dhdlw\" (UID: \"dcb37c05-2790-49a1-ab92-5301bd8cb642\") " pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.877599 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:27 crc kubenswrapper[4935]: I1201 18:53:27.960444 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:28 crc kubenswrapper[4935]: I1201 18:53:28.210696 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:28 crc kubenswrapper[4935]: I1201 18:53:28.223031 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:28 crc kubenswrapper[4935]: I1201 18:53:28.871321 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.366609 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.367135 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="prometheus" containerID="cri-o://dde129be6a644def858efe83416b6954e828146ddc0b245da0554f4d3fedcd65" gracePeriod=600 Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.367435 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="thanos-sidecar" containerID="cri-o://41a4ffdbbfee28a4f12add53b521c93865e82d7f5f460a76817396e7a217a1f1" gracePeriod=600 Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.367598 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="config-reloader" containerID="cri-o://1bbad29206b064f19ae94b6e8660e2e1b43d375841c23731992346c8000589f0" gracePeriod=600 Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.915240 4935 generic.go:334] "Generic (PLEG): container finished" podID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerID="41a4ffdbbfee28a4f12add53b521c93865e82d7f5f460a76817396e7a217a1f1" exitCode=0 Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.915673 4935 generic.go:334] "Generic (PLEG): container finished" podID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerID="1bbad29206b064f19ae94b6e8660e2e1b43d375841c23731992346c8000589f0" exitCode=0 Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.915684 4935 generic.go:334] "Generic (PLEG): container finished" podID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerID="dde129be6a644def858efe83416b6954e828146ddc0b245da0554f4d3fedcd65" exitCode=0 Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.915314 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerDied","Data":"41a4ffdbbfee28a4f12add53b521c93865e82d7f5f460a76817396e7a217a1f1"} Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.915722 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerDied","Data":"1bbad29206b064f19ae94b6e8660e2e1b43d375841c23731992346c8000589f0"} Dec 01 18:53:31 crc kubenswrapper[4935]: I1201 18:53:31.915738 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerDied","Data":"dde129be6a644def858efe83416b6954e828146ddc0b245da0554f4d3fedcd65"} Dec 01 18:53:33 crc kubenswrapper[4935]: I1201 18:53:33.211748 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.135:9090/-/ready\": dial tcp 10.217.0.135:9090: connect: connection refused" Dec 01 18:53:35 crc kubenswrapper[4935]: E1201 18:53:35.802240 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 01 18:53:35 crc kubenswrapper[4935]: E1201 18:53:35.803241 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n8scb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-d4ntp_openstack(1a7ac48a-042f-4d13-a9ac-d8449e732bbf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:53:35 crc kubenswrapper[4935]: E1201 18:53:35.804367 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-d4ntp" podUID="1a7ac48a-042f-4d13-a9ac-d8449e732bbf" Dec 01 18:53:35 crc kubenswrapper[4935]: I1201 18:53:35.953859 4935 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"3ff67049cd7fd22ec46007681e5260c6688d7534b36e80955ea81ea0f1fc0962"} Dec 01 18:53:35 crc kubenswrapper[4935]: E1201 18:53:35.964425 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-d4ntp" podUID="1a7ac48a-042f-4d13-a9ac-d8449e732bbf" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.304312 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.452092 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.452193 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-tls-assets\") pod \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.452629 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config\") pod \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.452656 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-thanos-prometheus-http-client-file\") pod \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.452699 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7d89\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-kube-api-access-g7d89\") pod \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.452737 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config-out\") pod \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.452790 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2046ae6b-b1cd-421d-a4b0-686e1e29c407-prometheus-metric-storage-rulefiles-0\") pod \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.452824 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: 
\"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-web-config\") pod \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\" (UID: \"2046ae6b-b1cd-421d-a4b0-686e1e29c407\") " Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.455443 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2046ae6b-b1cd-421d-a4b0-686e1e29c407-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "2046ae6b-b1cd-421d-a4b0-686e1e29c407" (UID: "2046ae6b-b1cd-421d-a4b0-686e1e29c407"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.460019 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-kube-api-access-g7d89" (OuterVolumeSpecName: "kube-api-access-g7d89") pod "2046ae6b-b1cd-421d-a4b0-686e1e29c407" (UID: "2046ae6b-b1cd-421d-a4b0-686e1e29c407"). InnerVolumeSpecName "kube-api-access-g7d89". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.460211 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "2046ae6b-b1cd-421d-a4b0-686e1e29c407" (UID: "2046ae6b-b1cd-421d-a4b0-686e1e29c407"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.460421 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "2046ae6b-b1cd-421d-a4b0-686e1e29c407" (UID: "2046ae6b-b1cd-421d-a4b0-686e1e29c407"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.461482 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config-out" (OuterVolumeSpecName: "config-out") pod "2046ae6b-b1cd-421d-a4b0-686e1e29c407" (UID: "2046ae6b-b1cd-421d-a4b0-686e1e29c407"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.463441 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config" (OuterVolumeSpecName: "config") pod "2046ae6b-b1cd-421d-a4b0-686e1e29c407" (UID: "2046ae6b-b1cd-421d-a4b0-686e1e29c407"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.463763 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "2046ae6b-b1cd-421d-a4b0-686e1e29c407" (UID: "2046ae6b-b1cd-421d-a4b0-686e1e29c407"). InnerVolumeSpecName "local-storage03-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.493694 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-web-config" (OuterVolumeSpecName: "web-config") pod "2046ae6b-b1cd-421d-a4b0-686e1e29c407" (UID: "2046ae6b-b1cd-421d-a4b0-686e1e29c407"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.562657 4935 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2046ae6b-b1cd-421d-a4b0-686e1e29c407-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.562981 4935 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-web-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.563009 4935 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.563019 4935 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.563029 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.563039 4935 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2046ae6b-b1cd-421d-a4b0-686e1e29c407-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.563051 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7d89\" (UniqueName: \"kubernetes.io/projected/2046ae6b-b1cd-421d-a4b0-686e1e29c407-kube-api-access-g7d89\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.563063 4935 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2046ae6b-b1cd-421d-a4b0-686e1e29c407-config-out\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.586943 4935 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.665085 4935 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.734039 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-bwt89"] Dec 01 18:53:36 crc kubenswrapper[4935]: W1201 18:53:36.739309 4935 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d4835b9_5a99_43d2_90ef_4beafe03afa7.slice/crio-2b68674db0c3031f3a8d2442c350e2df80a7d8dc2349e9613e032e8aec42a70f WatchSource:0}: Error finding container 2b68674db0c3031f3a8d2442c350e2df80a7d8dc2349e9613e032e8aec42a70f: Status 404 returned error can't find the container with id 2b68674db0c3031f3a8d2442c350e2df80a7d8dc2349e9613e032e8aec42a70f Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.976804 4935 generic.go:334] "Generic (PLEG): container finished" podID="4336b794-da9d-464c-87f0-e22f1041f630" containerID="9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df" exitCode=0 Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.977195 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-swfxc" event={"ID":"4336b794-da9d-464c-87f0-e22f1041f630","Type":"ContainerDied","Data":"9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df"} Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.980730 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.981335 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2046ae6b-b1cd-421d-a4b0-686e1e29c407","Type":"ContainerDied","Data":"6f80d7e561c1d0e53fd52257effb11a96633dbf784fa9ad806eb0dfb0785ca69"} Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.981387 4935 scope.go:117] "RemoveContainer" containerID="41a4ffdbbfee28a4f12add53b521c93865e82d7f5f460a76817396e7a217a1f1" Dec 01 18:53:36 crc kubenswrapper[4935]: I1201 18:53:36.986226 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bwt89" event={"ID":"1d4835b9-5a99-43d2-90ef-4beafe03afa7","Type":"ContainerStarted","Data":"2b68674db0c3031f3a8d2442c350e2df80a7d8dc2349e9613e032e8aec42a70f"} Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.065892 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.066519 4935 scope.go:117] "RemoveContainer" containerID="1bbad29206b064f19ae94b6e8660e2e1b43d375841c23731992346c8000589f0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.078376 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.128445 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-f9lxk"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.150264 4935 scope.go:117] "RemoveContainer" containerID="dde129be6a644def858efe83416b6954e828146ddc0b245da0554f4d3fedcd65" Dec 01 18:53:37 crc kubenswrapper[4935]: W1201 18:53:37.170063 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod350567c5_3a10_4bd8_b57c_f69aa1b581bc.slice/crio-7bf3d7825e9ae0c09669b9b1571b19a8ebe72dd25778d989e639364e65229df1 WatchSource:0}: Error finding container 7bf3d7825e9ae0c09669b9b1571b19a8ebe72dd25778d989e639364e65229df1: Status 404 returned error can't find the container with id 7bf3d7825e9ae0c09669b9b1571b19a8ebe72dd25778d989e639364e65229df1 Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.171599 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 01 18:53:37 crc 
kubenswrapper[4935]: E1201 18:53:37.172160 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="config-reloader" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.172177 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="config-reloader" Dec 01 18:53:37 crc kubenswrapper[4935]: E1201 18:53:37.172207 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="prometheus" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.172215 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="prometheus" Dec 01 18:53:37 crc kubenswrapper[4935]: E1201 18:53:37.172247 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="thanos-sidecar" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.172255 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="thanos-sidecar" Dec 01 18:53:37 crc kubenswrapper[4935]: E1201 18:53:37.172267 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="init-config-reloader" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.172275 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="init-config-reloader" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.172536 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="config-reloader" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.172557 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="prometheus" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.172567 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" containerName="thanos-sidecar" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.175416 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: W1201 18:53:37.175920 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4bc86e6_3397_42cb_9b81_0ca0db2821f4.slice/crio-39316ced33c248646a26e5fdd2f126ebbd1eefc7fe1868ac94f2d4ec4cdf9738 WatchSource:0}: Error finding container 39316ced33c248646a26e5fdd2f126ebbd1eefc7fe1868ac94f2d4ec4cdf9738: Status 404 returned error can't find the container with id 39316ced33c248646a26e5fdd2f126ebbd1eefc7fe1868ac94f2d4ec4cdf9738 Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.178403 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.178500 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.178624 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-j26z8" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.178706 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 01 18:53:37 crc kubenswrapper[4935]: W1201 18:53:37.179311 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddcb37c05_2790_49a1_ab92_5301bd8cb642.slice/crio-7f0328eab088a99dc199242f3c4079e623ba57828283c88d5768b6c7d82d075f WatchSource:0}: Error finding container 7f0328eab088a99dc199242f3c4079e623ba57828283c88d5768b6c7d82d075f: Status 404 returned error can't find the container with id 7f0328eab088a99dc199242f3c4079e623ba57828283c88d5768b6c7d82d075f Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.181544 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Dec 01 18:53:37 crc kubenswrapper[4935]: W1201 18:53:37.185482 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6318f430_920a_42fa_82fa_3543844bb06a.slice/crio-0fc69cac805ed07f5575981aef93faf56e9715153b0d5af15c176c61d381ab0b WatchSource:0}: Error finding container 0fc69cac805ed07f5575981aef93faf56e9715153b0d5af15c176c61d381ab0b: Status 404 returned error can't find the container with id 0fc69cac805ed07f5575981aef93faf56e9715153b0d5af15c176c61d381ab0b Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.187404 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.189613 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.232923 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-d7l9s"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.245595 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-74w75"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.267233 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.278750 4935 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/barbican-ec83-account-create-update-n6hqd"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.280643 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.280685 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.280741 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.280768 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.280843 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/236b4030-ceae-4159-b2c9-beb3b4eca661-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.280891 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/236b4030-ceae-4159-b2c9-beb3b4eca661-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.280930 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.280971 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blggk\" (UniqueName: \"kubernetes.io/projected/236b4030-ceae-4159-b2c9-beb3b4eca661-kube-api-access-blggk\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " 
pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.281008 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/236b4030-ceae-4159-b2c9-beb3b4eca661-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.281343 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-config\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.281387 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.289949 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dedb-account-create-update-dhdlw"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.300891 4935 scope.go:117] "RemoveContainer" containerID="b0248489c60e5fc8835d98c149d9d39763045268826d86eb62e5e34dbc7a8977" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.301582 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c7da-account-create-update-hprpg"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.312404 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-20d7-account-create-update-bgg6d"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.322897 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-lmhvh"] Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.383668 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384175 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/236b4030-ceae-4159-b2c9-beb3b4eca661-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384205 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/236b4030-ceae-4159-b2c9-beb3b4eca661-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384247 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: 
\"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384272 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blggk\" (UniqueName: \"kubernetes.io/projected/236b4030-ceae-4159-b2c9-beb3b4eca661-kube-api-access-blggk\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384311 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/236b4030-ceae-4159-b2c9-beb3b4eca661-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384367 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-config\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384391 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384433 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384454 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.384489 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.385073 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.387786 4935 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/236b4030-ceae-4159-b2c9-beb3b4eca661-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.393665 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.393873 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.394033 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.394293 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-config\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.395052 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.395336 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/236b4030-ceae-4159-b2c9-beb3b4eca661-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.395662 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/236b4030-ceae-4159-b2c9-beb3b4eca661-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.404951 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/236b4030-ceae-4159-b2c9-beb3b4eca661-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " 
pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.416562 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blggk\" (UniqueName: \"kubernetes.io/projected/236b4030-ceae-4159-b2c9-beb3b4eca661-kube-api-access-blggk\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.471465 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"prometheus-metric-storage-0\" (UID: \"236b4030-ceae-4159-b2c9-beb3b4eca661\") " pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.573253 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:37 crc kubenswrapper[4935]: I1201 18:53:37.996847 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-20d7-account-create-update-bgg6d" event={"ID":"6318f430-920a-42fa-82fa-3543844bb06a","Type":"ContainerStarted","Data":"0fc69cac805ed07f5575981aef93faf56e9715153b0d5af15c176c61d381ab0b"} Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.000665 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-d7l9s" event={"ID":"b468f617-1ac4-4187-a32a-a35e87881f70","Type":"ContainerStarted","Data":"45238045dbe668cb68d60fd4e489919b3172477a93fa601894f4bf0167ecf814"} Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.002045 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-f9lxk" event={"ID":"709a21ee-5142-492b-9b88-6f39cb92473d","Type":"ContainerStarted","Data":"26d851f4c3453daf872f4b1658dd14c55d0d9fd39494046d71418f1d4c599ae9"} Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.003159 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ec83-account-create-update-n6hqd" event={"ID":"350567c5-3a10-4bd8-b57c-f69aa1b581bc","Type":"ContainerStarted","Data":"7bf3d7825e9ae0c09669b9b1571b19a8ebe72dd25778d989e639364e65229df1"} Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.005453 4935 generic.go:334] "Generic (PLEG): container finished" podID="1d4835b9-5a99-43d2-90ef-4beafe03afa7" containerID="9d11efdc57154e16c39f2cd0e26fc5f227c8bb2feddf55dc3b1b589543aef86f" exitCode=0 Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.005534 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bwt89" event={"ID":"1d4835b9-5a99-43d2-90ef-4beafe03afa7","Type":"ContainerDied","Data":"9d11efdc57154e16c39f2cd0e26fc5f227c8bb2feddf55dc3b1b589543aef86f"} Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.007601 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dedb-account-create-update-dhdlw" event={"ID":"dcb37c05-2790-49a1-ab92-5301bd8cb642","Type":"ContainerStarted","Data":"7f0328eab088a99dc199242f3c4079e623ba57828283c88d5768b6c7d82d075f"} Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.009225 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-74w75" event={"ID":"f4bc86e6-3397-42cb-9b81-0ca0db2821f4","Type":"ContainerStarted","Data":"39316ced33c248646a26e5fdd2f126ebbd1eefc7fe1868ac94f2d4ec4cdf9738"} Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.010516 4935 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/cinder-c7da-account-create-update-hprpg" event={"ID":"16f3dbdb-7bea-419d-accc-89a7f288c977","Type":"ContainerStarted","Data":"8614d27174b04ca330cbbcf82bd527794e49b9a5360be6aa0acea8c5982876ca"} Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.012374 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lmhvh" event={"ID":"7e086eca-2e25-4bf7-9d95-807b71ab8945","Type":"ContainerStarted","Data":"f8cbc88585ff7058bb1c1685f57824a9ee506bd3322d85aa29d7e5c0bdca9022"} Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.099433 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 01 18:53:38 crc kubenswrapper[4935]: I1201 18:53:38.521733 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2046ae6b-b1cd-421d-a4b0-686e1e29c407" path="/var/lib/kubelet/pods/2046ae6b-b1cd-421d-a4b0-686e1e29c407/volumes" Dec 01 18:53:39 crc kubenswrapper[4935]: I1201 18:53:39.036514 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"236b4030-ceae-4159-b2c9-beb3b4eca661","Type":"ContainerStarted","Data":"5e6b5e8123cadd4e1d0f889941ca6ae7a53c7da39a6b6679d0eb86ba7addd309"} Dec 01 18:53:39 crc kubenswrapper[4935]: E1201 18:53:39.894052 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod350567c5_3a10_4bd8_b57c_f69aa1b581bc.slice/crio-conmon-f282d3655ee39893ad55f6a461309e5eaa28847e561c6405b69dd3158e9614da.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6318f430_920a_42fa_82fa_3543844bb06a.slice/crio-conmon-d22b53fa3060ca41bf78d06d4affc1f754185a1f8e42cc29ab1171ff35674ccd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6318f430_920a_42fa_82fa_3543844bb06a.slice/crio-d22b53fa3060ca41bf78d06d4affc1f754185a1f8e42cc29ab1171ff35674ccd.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.061792 4935 generic.go:334] "Generic (PLEG): container finished" podID="dcb37c05-2790-49a1-ab92-5301bd8cb642" containerID="9388b77900307818a2bd9915995d6fc0844b9768b8a8bf5df705398d6333b1d7" exitCode=0 Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.061885 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dedb-account-create-update-dhdlw" event={"ID":"dcb37c05-2790-49a1-ab92-5301bd8cb642","Type":"ContainerDied","Data":"9388b77900307818a2bd9915995d6fc0844b9768b8a8bf5df705398d6333b1d7"} Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.065383 4935 generic.go:334] "Generic (PLEG): container finished" podID="350567c5-3a10-4bd8-b57c-f69aa1b581bc" containerID="f282d3655ee39893ad55f6a461309e5eaa28847e561c6405b69dd3158e9614da" exitCode=0 Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.065460 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ec83-account-create-update-n6hqd" event={"ID":"350567c5-3a10-4bd8-b57c-f69aa1b581bc","Type":"ContainerDied","Data":"f282d3655ee39893ad55f6a461309e5eaa28847e561c6405b69dd3158e9614da"} Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.069461 4935 generic.go:334] "Generic (PLEG): container finished" podID="6318f430-920a-42fa-82fa-3543844bb06a" 
containerID="d22b53fa3060ca41bf78d06d4affc1f754185a1f8e42cc29ab1171ff35674ccd" exitCode=0 Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.069552 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-20d7-account-create-update-bgg6d" event={"ID":"6318f430-920a-42fa-82fa-3543844bb06a","Type":"ContainerDied","Data":"d22b53fa3060ca41bf78d06d4affc1f754185a1f8e42cc29ab1171ff35674ccd"} Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.071922 4935 generic.go:334] "Generic (PLEG): container finished" podID="b468f617-1ac4-4187-a32a-a35e87881f70" containerID="c2e6634569090e74b250f5efe6a6fce97c638e390af2caf1223214d92a0623d7" exitCode=0 Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.072029 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-d7l9s" event={"ID":"b468f617-1ac4-4187-a32a-a35e87881f70","Type":"ContainerDied","Data":"c2e6634569090e74b250f5efe6a6fce97c638e390af2caf1223214d92a0623d7"} Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.076271 4935 generic.go:334] "Generic (PLEG): container finished" podID="f4bc86e6-3397-42cb-9b81-0ca0db2821f4" containerID="b42090b8cfe8b7c9bd166b7e25729becf52269876038295b5c241f5d63ac770a" exitCode=0 Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.076338 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-74w75" event={"ID":"f4bc86e6-3397-42cb-9b81-0ca0db2821f4","Type":"ContainerDied","Data":"b42090b8cfe8b7c9bd166b7e25729becf52269876038295b5c241f5d63ac770a"} Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.087437 4935 generic.go:334] "Generic (PLEG): container finished" podID="709a21ee-5142-492b-9b88-6f39cb92473d" containerID="0afd684f60748aad72733a1ddbf21870a08e431bf7883e8157dfdc3009c562f7" exitCode=0 Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.087539 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-f9lxk" event={"ID":"709a21ee-5142-492b-9b88-6f39cb92473d","Type":"ContainerDied","Data":"0afd684f60748aad72733a1ddbf21870a08e431bf7883e8157dfdc3009c562f7"} Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.091400 4935 generic.go:334] "Generic (PLEG): container finished" podID="16f3dbdb-7bea-419d-accc-89a7f288c977" containerID="a6d384affd115922c2418f4a1f7a10eb14e06b018ba9f5c2c5a996b2e146a854" exitCode=0 Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.091639 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c7da-account-create-update-hprpg" event={"ID":"16f3dbdb-7bea-419d-accc-89a7f288c977","Type":"ContainerDied","Data":"a6d384affd115922c2418f4a1f7a10eb14e06b018ba9f5c2c5a996b2e146a854"} Dec 01 18:53:40 crc kubenswrapper[4935]: I1201 18:53:40.926548 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.073118 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d4835b9-5a99-43d2-90ef-4beafe03afa7-operator-scripts\") pod \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\" (UID: \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\") " Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.073186 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfmtg\" (UniqueName: \"kubernetes.io/projected/1d4835b9-5a99-43d2-90ef-4beafe03afa7-kube-api-access-gfmtg\") pod \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\" (UID: \"1d4835b9-5a99-43d2-90ef-4beafe03afa7\") " Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.074925 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d4835b9-5a99-43d2-90ef-4beafe03afa7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1d4835b9-5a99-43d2-90ef-4beafe03afa7" (UID: "1d4835b9-5a99-43d2-90ef-4beafe03afa7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.083122 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d4835b9-5a99-43d2-90ef-4beafe03afa7-kube-api-access-gfmtg" (OuterVolumeSpecName: "kube-api-access-gfmtg") pod "1d4835b9-5a99-43d2-90ef-4beafe03afa7" (UID: "1d4835b9-5a99-43d2-90ef-4beafe03afa7"). InnerVolumeSpecName "kube-api-access-gfmtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.102369 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"e048c62492aef5352eb5be80a44adf99d8417578a8b5e1697fea869a54b51aa8"} Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.102407 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"6c2da1ab3ef7580fa0a8fe13bf491d05ad0d65f8552de05d827d63d19ed051b5"} Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.102416 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"ef510037f148afd4349df985a7cebfc722a5e0e21fdd71c8070821d507b1a62a"} Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.103994 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bwt89" event={"ID":"1d4835b9-5a99-43d2-90ef-4beafe03afa7","Type":"ContainerDied","Data":"2b68674db0c3031f3a8d2442c350e2df80a7d8dc2349e9613e032e8aec42a70f"} Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.104013 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b68674db0c3031f3a8d2442c350e2df80a7d8dc2349e9613e032e8aec42a70f" Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.104056 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-bwt89" Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.122660 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-swfxc" event={"ID":"4336b794-da9d-464c-87f0-e22f1041f630","Type":"ContainerStarted","Data":"55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930"} Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.183818 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d4835b9-5a99-43d2-90ef-4beafe03afa7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.183867 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfmtg\" (UniqueName: \"kubernetes.io/projected/1d4835b9-5a99-43d2-90ef-4beafe03afa7-kube-api-access-gfmtg\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:41 crc kubenswrapper[4935]: I1201 18:53:41.968577 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-swfxc" podStartSLOduration=4.1389378 podStartE2EDuration="19.968555206s" podCreationTimestamp="2025-12-01 18:53:22 +0000 UTC" firstStartedPulling="2025-12-01 18:53:23.824197977 +0000 UTC m=+1417.845827236" lastFinishedPulling="2025-12-01 18:53:39.653815373 +0000 UTC m=+1433.675444642" observedRunningTime="2025-12-01 18:53:41.163444308 +0000 UTC m=+1435.185073567" watchObservedRunningTime="2025-12-01 18:53:41.968555206 +0000 UTC m=+1435.990184465" Dec 01 18:53:42 crc kubenswrapper[4935]: I1201 18:53:42.135702 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"236b4030-ceae-4159-b2c9-beb3b4eca661","Type":"ContainerStarted","Data":"7ce034920b4bb9c189d185cd2b3656b98ea850e75784a5fac37254f1df3d1c93"} Dec 01 18:53:42 crc kubenswrapper[4935]: I1201 18:53:42.591777 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:42 crc kubenswrapper[4935]: I1201 18:53:42.593485 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:42 crc kubenswrapper[4935]: I1201 18:53:42.649838 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:43 crc kubenswrapper[4935]: I1201 18:53:43.984594 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.012818 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.019887 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.030708 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.070811 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.096540 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-74w75" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.100947 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.151129 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcb37c05-2790-49a1-ab92-5301bd8cb642-operator-scripts\") pod \"dcb37c05-2790-49a1-ab92-5301bd8cb642\" (UID: \"dcb37c05-2790-49a1-ab92-5301bd8cb642\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.151224 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxxvd\" (UniqueName: \"kubernetes.io/projected/dcb37c05-2790-49a1-ab92-5301bd8cb642-kube-api-access-lxxvd\") pod \"dcb37c05-2790-49a1-ab92-5301bd8cb642\" (UID: \"dcb37c05-2790-49a1-ab92-5301bd8cb642\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.151291 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggwsk\" (UniqueName: \"kubernetes.io/projected/6318f430-920a-42fa-82fa-3543844bb06a-kube-api-access-ggwsk\") pod \"6318f430-920a-42fa-82fa-3543844bb06a\" (UID: \"6318f430-920a-42fa-82fa-3543844bb06a\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.151312 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6318f430-920a-42fa-82fa-3543844bb06a-operator-scripts\") pod \"6318f430-920a-42fa-82fa-3543844bb06a\" (UID: \"6318f430-920a-42fa-82fa-3543844bb06a\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.151387 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b468f617-1ac4-4187-a32a-a35e87881f70-operator-scripts\") pod \"b468f617-1ac4-4187-a32a-a35e87881f70\" (UID: \"b468f617-1ac4-4187-a32a-a35e87881f70\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.151406 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq24q\" (UniqueName: \"kubernetes.io/projected/350567c5-3a10-4bd8-b57c-f69aa1b581bc-kube-api-access-fq24q\") pod \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\" (UID: \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.151437 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtfsx\" (UniqueName: \"kubernetes.io/projected/b468f617-1ac4-4187-a32a-a35e87881f70-kube-api-access-xtfsx\") pod \"b468f617-1ac4-4187-a32a-a35e87881f70\" (UID: \"b468f617-1ac4-4187-a32a-a35e87881f70\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.151475 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/350567c5-3a10-4bd8-b57c-f69aa1b581bc-operator-scripts\") pod \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\" (UID: \"350567c5-3a10-4bd8-b57c-f69aa1b581bc\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.152683 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/350567c5-3a10-4bd8-b57c-f69aa1b581bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "350567c5-3a10-4bd8-b57c-f69aa1b581bc" (UID: "350567c5-3a10-4bd8-b57c-f69aa1b581bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.153181 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcb37c05-2790-49a1-ab92-5301bd8cb642-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dcb37c05-2790-49a1-ab92-5301bd8cb642" (UID: "dcb37c05-2790-49a1-ab92-5301bd8cb642"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.153628 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6318f430-920a-42fa-82fa-3543844bb06a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6318f430-920a-42fa-82fa-3543844bb06a" (UID: "6318f430-920a-42fa-82fa-3543844bb06a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.153738 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b468f617-1ac4-4187-a32a-a35e87881f70-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b468f617-1ac4-4187-a32a-a35e87881f70" (UID: "b468f617-1ac4-4187-a32a-a35e87881f70"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.188809 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-74w75" event={"ID":"f4bc86e6-3397-42cb-9b81-0ca0db2821f4","Type":"ContainerDied","Data":"39316ced33c248646a26e5fdd2f126ebbd1eefc7fe1868ac94f2d4ec4cdf9738"} Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.188849 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39316ced33c248646a26e5fdd2f126ebbd1eefc7fe1868ac94f2d4ec4cdf9738" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.188916 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-74w75" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.197678 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-f9lxk" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.198179 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-f9lxk" event={"ID":"709a21ee-5142-492b-9b88-6f39cb92473d","Type":"ContainerDied","Data":"26d851f4c3453daf872f4b1658dd14c55d0d9fd39494046d71418f1d4c599ae9"} Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.198200 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26d851f4c3453daf872f4b1658dd14c55d0d9fd39494046d71418f1d4c599ae9" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.209114 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c7da-account-create-update-hprpg" event={"ID":"16f3dbdb-7bea-419d-accc-89a7f288c977","Type":"ContainerDied","Data":"8614d27174b04ca330cbbcf82bd527794e49b9a5360be6aa0acea8c5982876ca"} Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.209139 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8614d27174b04ca330cbbcf82bd527794e49b9a5360be6aa0acea8c5982876ca" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.209210 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c7da-account-create-update-hprpg" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.217011 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dedb-account-create-update-dhdlw" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.217296 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dedb-account-create-update-dhdlw" event={"ID":"dcb37c05-2790-49a1-ab92-5301bd8cb642","Type":"ContainerDied","Data":"7f0328eab088a99dc199242f3c4079e623ba57828283c88d5768b6c7d82d075f"} Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.217338 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f0328eab088a99dc199242f3c4079e623ba57828283c88d5768b6c7d82d075f" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.218996 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ec83-account-create-update-n6hqd" event={"ID":"350567c5-3a10-4bd8-b57c-f69aa1b581bc","Type":"ContainerDied","Data":"7bf3d7825e9ae0c09669b9b1571b19a8ebe72dd25778d989e639364e65229df1"} Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.219020 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bf3d7825e9ae0c09669b9b1571b19a8ebe72dd25778d989e639364e65229df1" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.219078 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ec83-account-create-update-n6hqd" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.225699 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-20d7-account-create-update-bgg6d" event={"ID":"6318f430-920a-42fa-82fa-3543844bb06a","Type":"ContainerDied","Data":"0fc69cac805ed07f5575981aef93faf56e9715153b0d5af15c176c61d381ab0b"} Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.225715 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-20d7-account-create-update-bgg6d" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.225725 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fc69cac805ed07f5575981aef93faf56e9715153b0d5af15c176c61d381ab0b" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.226552 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b468f617-1ac4-4187-a32a-a35e87881f70-kube-api-access-xtfsx" (OuterVolumeSpecName: "kube-api-access-xtfsx") pod "b468f617-1ac4-4187-a32a-a35e87881f70" (UID: "b468f617-1ac4-4187-a32a-a35e87881f70"). InnerVolumeSpecName "kube-api-access-xtfsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.227211 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcb37c05-2790-49a1-ab92-5301bd8cb642-kube-api-access-lxxvd" (OuterVolumeSpecName: "kube-api-access-lxxvd") pod "dcb37c05-2790-49a1-ab92-5301bd8cb642" (UID: "dcb37c05-2790-49a1-ab92-5301bd8cb642"). InnerVolumeSpecName "kube-api-access-lxxvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.228274 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-d7l9s" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.228485 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-d7l9s" event={"ID":"b468f617-1ac4-4187-a32a-a35e87881f70","Type":"ContainerDied","Data":"45238045dbe668cb68d60fd4e489919b3172477a93fa601894f4bf0167ecf814"} Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.228507 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45238045dbe668cb68d60fd4e489919b3172477a93fa601894f4bf0167ecf814" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.230916 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/350567c5-3a10-4bd8-b57c-f69aa1b581bc-kube-api-access-fq24q" (OuterVolumeSpecName: "kube-api-access-fq24q") pod "350567c5-3a10-4bd8-b57c-f69aa1b581bc" (UID: "350567c5-3a10-4bd8-b57c-f69aa1b581bc"). InnerVolumeSpecName "kube-api-access-fq24q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.231056 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6318f430-920a-42fa-82fa-3543844bb06a-kube-api-access-ggwsk" (OuterVolumeSpecName: "kube-api-access-ggwsk") pod "6318f430-920a-42fa-82fa-3543844bb06a" (UID: "6318f430-920a-42fa-82fa-3543844bb06a"). InnerVolumeSpecName "kube-api-access-ggwsk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.253525 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-operator-scripts\") pod \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\" (UID: \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.253564 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/709a21ee-5142-492b-9b88-6f39cb92473d-operator-scripts\") pod \"709a21ee-5142-492b-9b88-6f39cb92473d\" (UID: \"709a21ee-5142-492b-9b88-6f39cb92473d\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.253645 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b8m8\" (UniqueName: \"kubernetes.io/projected/16f3dbdb-7bea-419d-accc-89a7f288c977-kube-api-access-2b8m8\") pod \"16f3dbdb-7bea-419d-accc-89a7f288c977\" (UID: \"16f3dbdb-7bea-419d-accc-89a7f288c977\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.253671 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16f3dbdb-7bea-419d-accc-89a7f288c977-operator-scripts\") pod \"16f3dbdb-7bea-419d-accc-89a7f288c977\" (UID: \"16f3dbdb-7bea-419d-accc-89a7f288c977\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.253696 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgbrm\" (UniqueName: \"kubernetes.io/projected/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-kube-api-access-dgbrm\") pod \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\" (UID: \"f4bc86e6-3397-42cb-9b81-0ca0db2821f4\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.253810 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvc4z\" (UniqueName: \"kubernetes.io/projected/709a21ee-5142-492b-9b88-6f39cb92473d-kube-api-access-fvc4z\") pod \"709a21ee-5142-492b-9b88-6f39cb92473d\" (UID: \"709a21ee-5142-492b-9b88-6f39cb92473d\") " Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.253973 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f4bc86e6-3397-42cb-9b81-0ca0db2821f4" (UID: "f4bc86e6-3397-42cb-9b81-0ca0db2821f4"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254358 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254382 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcb37c05-2790-49a1-ab92-5301bd8cb642-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254391 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxxvd\" (UniqueName: \"kubernetes.io/projected/dcb37c05-2790-49a1-ab92-5301bd8cb642-kube-api-access-lxxvd\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254403 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggwsk\" (UniqueName: \"kubernetes.io/projected/6318f430-920a-42fa-82fa-3543844bb06a-kube-api-access-ggwsk\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254413 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6318f430-920a-42fa-82fa-3543844bb06a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254425 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b468f617-1ac4-4187-a32a-a35e87881f70-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254435 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq24q\" (UniqueName: \"kubernetes.io/projected/350567c5-3a10-4bd8-b57c-f69aa1b581bc-kube-api-access-fq24q\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254444 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtfsx\" (UniqueName: \"kubernetes.io/projected/b468f617-1ac4-4187-a32a-a35e87881f70-kube-api-access-xtfsx\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254453 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/350567c5-3a10-4bd8-b57c-f69aa1b581bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.254794 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/709a21ee-5142-492b-9b88-6f39cb92473d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "709a21ee-5142-492b-9b88-6f39cb92473d" (UID: "709a21ee-5142-492b-9b88-6f39cb92473d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.255312 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16f3dbdb-7bea-419d-accc-89a7f288c977-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "16f3dbdb-7bea-419d-accc-89a7f288c977" (UID: "16f3dbdb-7bea-419d-accc-89a7f288c977"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.256902 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16f3dbdb-7bea-419d-accc-89a7f288c977-kube-api-access-2b8m8" (OuterVolumeSpecName: "kube-api-access-2b8m8") pod "16f3dbdb-7bea-419d-accc-89a7f288c977" (UID: "16f3dbdb-7bea-419d-accc-89a7f288c977"). InnerVolumeSpecName "kube-api-access-2b8m8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.257537 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/709a21ee-5142-492b-9b88-6f39cb92473d-kube-api-access-fvc4z" (OuterVolumeSpecName: "kube-api-access-fvc4z") pod "709a21ee-5142-492b-9b88-6f39cb92473d" (UID: "709a21ee-5142-492b-9b88-6f39cb92473d"). InnerVolumeSpecName "kube-api-access-fvc4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.259930 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-kube-api-access-dgbrm" (OuterVolumeSpecName: "kube-api-access-dgbrm") pod "f4bc86e6-3397-42cb-9b81-0ca0db2821f4" (UID: "f4bc86e6-3397-42cb-9b81-0ca0db2821f4"). InnerVolumeSpecName "kube-api-access-dgbrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.356355 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/709a21ee-5142-492b-9b88-6f39cb92473d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.356461 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b8m8\" (UniqueName: \"kubernetes.io/projected/16f3dbdb-7bea-419d-accc-89a7f288c977-kube-api-access-2b8m8\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.356528 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16f3dbdb-7bea-419d-accc-89a7f288c977-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.356581 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgbrm\" (UniqueName: \"kubernetes.io/projected/f4bc86e6-3397-42cb-9b81-0ca0db2821f4-kube-api-access-dgbrm\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:44 crc kubenswrapper[4935]: I1201 18:53:44.356632 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvc4z\" (UniqueName: \"kubernetes.io/projected/709a21ee-5142-492b-9b88-6f39cb92473d-kube-api-access-fvc4z\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:45 crc kubenswrapper[4935]: I1201 18:53:45.238159 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"6ee26fe2b866f19f6d71e0e195972655901798102a02dd49cd9ec3b79bc8c59e"} Dec 01 18:53:45 crc kubenswrapper[4935]: I1201 18:53:45.239338 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lmhvh" event={"ID":"7e086eca-2e25-4bf7-9d95-807b71ab8945","Type":"ContainerStarted","Data":"6840baa43aea9b4476c18684f33dd192e041f745869b7c2a5ac6d5a2f32da409"} Dec 01 18:53:45 crc kubenswrapper[4935]: I1201 18:53:45.262536 4935 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/keystone-db-sync-lmhvh" podStartSLOduration=11.742211705999999 podStartE2EDuration="18.262516042s" podCreationTimestamp="2025-12-01 18:53:27 +0000 UTC" firstStartedPulling="2025-12-01 18:53:37.265980695 +0000 UTC m=+1431.287609954" lastFinishedPulling="2025-12-01 18:53:43.786285041 +0000 UTC m=+1437.807914290" observedRunningTime="2025-12-01 18:53:45.256207016 +0000 UTC m=+1439.277836285" watchObservedRunningTime="2025-12-01 18:53:45.262516042 +0000 UTC m=+1439.284145301" Dec 01 18:53:46 crc kubenswrapper[4935]: I1201 18:53:46.256045 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"63a31e1c91147cff6a6084a71683790a48146e07ef84326ed9a5ba666c2257d8"} Dec 01 18:53:46 crc kubenswrapper[4935]: I1201 18:53:46.256527 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"943a947e15d59093a2725f259d73749fb334dbf692d9f0b4ca2889a41c8e78b8"} Dec 01 18:53:47 crc kubenswrapper[4935]: I1201 18:53:47.283294 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"4c34b11eca0a535cb36a6c5cf6162e47da4bfc0216e5905d988105209db5fb85"} Dec 01 18:53:47 crc kubenswrapper[4935]: I1201 18:53:47.283728 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"b98669263c989302e9ae1d77c18d419ccf04b05d89791d5b80dffbe342d48f9e"} Dec 01 18:53:48 crc kubenswrapper[4935]: I1201 18:53:48.299025 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"9ce751d331ae1b1377b48a47b0a557ad0d6f7e2c1dcff71b1da2661cd33ed3de"} Dec 01 18:53:48 crc kubenswrapper[4935]: I1201 18:53:48.301433 4935 generic.go:334] "Generic (PLEG): container finished" podID="7e086eca-2e25-4bf7-9d95-807b71ab8945" containerID="6840baa43aea9b4476c18684f33dd192e041f745869b7c2a5ac6d5a2f32da409" exitCode=0 Dec 01 18:53:48 crc kubenswrapper[4935]: I1201 18:53:48.301466 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lmhvh" event={"ID":"7e086eca-2e25-4bf7-9d95-807b71ab8945","Type":"ContainerDied","Data":"6840baa43aea9b4476c18684f33dd192e041f745869b7c2a5ac6d5a2f32da409"} Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.336846 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"7758477ffb78af81855fd2972e651fc92a1bea838f6ef82718fde74546a57f8a"} Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.337438 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"cead2b645a2c930abe6635a00b40fa676c07fa607e0fabd8110a64649b9cc741"} Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.337449 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"6dc604ca4778fb72ddc1c6a5b22c9c35e759af881ed25ec5d3122eba73f65c02"} Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 
18:53:49.337458 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"f21e6f1c3f12b0455bf5f07895d69f4287118e2973ea1a088756ca0265cff447"} Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.340258 4935 generic.go:334] "Generic (PLEG): container finished" podID="236b4030-ceae-4159-b2c9-beb3b4eca661" containerID="7ce034920b4bb9c189d185cd2b3656b98ea850e75784a5fac37254f1df3d1c93" exitCode=0 Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.340436 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"236b4030-ceae-4159-b2c9-beb3b4eca661","Type":"ContainerDied","Data":"7ce034920b4bb9c189d185cd2b3656b98ea850e75784a5fac37254f1df3d1c93"} Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.824276 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.967103 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncpsg\" (UniqueName: \"kubernetes.io/projected/7e086eca-2e25-4bf7-9d95-807b71ab8945-kube-api-access-ncpsg\") pod \"7e086eca-2e25-4bf7-9d95-807b71ab8945\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.967698 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-config-data\") pod \"7e086eca-2e25-4bf7-9d95-807b71ab8945\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.967760 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-combined-ca-bundle\") pod \"7e086eca-2e25-4bf7-9d95-807b71ab8945\" (UID: \"7e086eca-2e25-4bf7-9d95-807b71ab8945\") " Dec 01 18:53:49 crc kubenswrapper[4935]: I1201 18:53:49.972780 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e086eca-2e25-4bf7-9d95-807b71ab8945-kube-api-access-ncpsg" (OuterVolumeSpecName: "kube-api-access-ncpsg") pod "7e086eca-2e25-4bf7-9d95-807b71ab8945" (UID: "7e086eca-2e25-4bf7-9d95-807b71ab8945"). InnerVolumeSpecName "kube-api-access-ncpsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.008859 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e086eca-2e25-4bf7-9d95-807b71ab8945" (UID: "7e086eca-2e25-4bf7-9d95-807b71ab8945"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.037369 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-config-data" (OuterVolumeSpecName: "config-data") pod "7e086eca-2e25-4bf7-9d95-807b71ab8945" (UID: "7e086eca-2e25-4bf7-9d95-807b71ab8945"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.070018 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncpsg\" (UniqueName: \"kubernetes.io/projected/7e086eca-2e25-4bf7-9d95-807b71ab8945-kube-api-access-ncpsg\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.070050 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.070060 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e086eca-2e25-4bf7-9d95-807b71ab8945-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.358454 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"7ba15bafdcacbce15a2e35d64f65bcbc5a7d25fb359b832e3b6ea1dc9705eb9b"} Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.358507 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0ee2844-1713-4b15-81f5-138cbc14fe03","Type":"ContainerStarted","Data":"7d6078b60f45583ff258b6cfa4431c10f9861256b1d055044a0cc6cd2b0cbe92"} Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.360550 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lmhvh" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.360549 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lmhvh" event={"ID":"7e086eca-2e25-4bf7-9d95-807b71ab8945","Type":"ContainerDied","Data":"f8cbc88585ff7058bb1c1685f57824a9ee506bd3322d85aa29d7e5c0bdca9022"} Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.360770 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8cbc88585ff7058bb1c1685f57824a9ee506bd3322d85aa29d7e5c0bdca9022" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.362574 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"236b4030-ceae-4159-b2c9-beb3b4eca661","Type":"ContainerStarted","Data":"c29efc98a8b3f7e86f912fe10662616daac7b920a149743c0352aef45ce9e535"} Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.401166 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=46.112514937 podStartE2EDuration="58.40112981s" podCreationTimestamp="2025-12-01 18:52:52 +0000 UTC" firstStartedPulling="2025-12-01 18:53:35.787454164 +0000 UTC m=+1429.809083423" lastFinishedPulling="2025-12-01 18:53:48.076069027 +0000 UTC m=+1442.097698296" observedRunningTime="2025-12-01 18:53:50.393547634 +0000 UTC m=+1444.415176893" watchObservedRunningTime="2025-12-01 18:53:50.40112981 +0000 UTC m=+1444.422759059" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530006 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-jwvpw"] Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.530357 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="709a21ee-5142-492b-9b88-6f39cb92473d" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530371 4935 
state_mem.go:107] "Deleted CPUSet assignment" podUID="709a21ee-5142-492b-9b88-6f39cb92473d" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.530380 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e086eca-2e25-4bf7-9d95-807b71ab8945" containerName="keystone-db-sync" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530385 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e086eca-2e25-4bf7-9d95-807b71ab8945" containerName="keystone-db-sync" Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.530406 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcb37c05-2790-49a1-ab92-5301bd8cb642" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530411 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcb37c05-2790-49a1-ab92-5301bd8cb642" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.530430 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b468f617-1ac4-4187-a32a-a35e87881f70" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530435 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b468f617-1ac4-4187-a32a-a35e87881f70" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.530448 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d4835b9-5a99-43d2-90ef-4beafe03afa7" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530454 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d4835b9-5a99-43d2-90ef-4beafe03afa7" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.530467 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6318f430-920a-42fa-82fa-3543844bb06a" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530473 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6318f430-920a-42fa-82fa-3543844bb06a" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.530483 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16f3dbdb-7bea-419d-accc-89a7f288c977" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530489 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="16f3dbdb-7bea-419d-accc-89a7f288c977" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.530500 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4bc86e6-3397-42cb-9b81-0ca0db2821f4" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530505 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4bc86e6-3397-42cb-9b81-0ca0db2821f4" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.530528 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="350567c5-3a10-4bd8-b57c-f69aa1b581bc" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530533 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="350567c5-3a10-4bd8-b57c-f69aa1b581bc" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530734 4935 
memory_manager.go:354] "RemoveStaleState removing state" podUID="6318f430-920a-42fa-82fa-3543844bb06a" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530753 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d4835b9-5a99-43d2-90ef-4beafe03afa7" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530764 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e086eca-2e25-4bf7-9d95-807b71ab8945" containerName="keystone-db-sync" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530772 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="b468f617-1ac4-4187-a32a-a35e87881f70" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530783 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4bc86e6-3397-42cb-9b81-0ca0db2821f4" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530792 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="350567c5-3a10-4bd8-b57c-f69aa1b581bc" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530808 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcb37c05-2790-49a1-ab92-5301bd8cb642" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530817 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="16f3dbdb-7bea-419d-accc-89a7f288c977" containerName="mariadb-account-create-update" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.530824 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="709a21ee-5142-492b-9b88-6f39cb92473d" containerName="mariadb-database-create" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.531939 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.533211 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-jwvpw"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.543960 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-f5lxr"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.545335 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.548319 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.548565 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9jnc7" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.548693 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.548843 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.556303 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.574829 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-f5lxr"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.693241 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-747dz"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.693808 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-combined-ca-bundle\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.693887 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5zcj\" (UniqueName: \"kubernetes.io/projected/d93bd7f8-f1b8-4834-b2bc-f841295d35da-kube-api-access-k5zcj\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.693930 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-config-data\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.693981 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-config\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.693996 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-fernet-keys\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.694043 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpm2q\" (UniqueName: \"kubernetes.io/projected/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-kube-api-access-gpm2q\") pod \"keystone-bootstrap-f5lxr\" (UID: 
\"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.694089 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.694746 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.694137 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.697660 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-scripts\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.697757 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.697788 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-credential-keys\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.701261 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-tvgrx" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.701498 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.706279 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-747dz"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.759678 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-8mddp"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.761128 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.778889 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.778945 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.778982 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-kbcjk" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.795322 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-c8448"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.796908 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.803958 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804035 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-scripts\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804079 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-combined-ca-bundle\") pod \"neutron-db-sync-8mddp\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804118 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-combined-ca-bundle\") pod \"heat-db-sync-747dz\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804172 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804198 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-credential-keys\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804220 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-config\") pod \"neutron-db-sync-8mddp\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " 
pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804243 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khrkh\" (UniqueName: \"kubernetes.io/projected/10f9fc1e-d72b-4123-b805-82a03d56c439-kube-api-access-khrkh\") pod \"neutron-db-sync-8mddp\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804271 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj9nw\" (UniqueName: \"kubernetes.io/projected/7935698e-d40a-4c10-bf91-0a5d8855a09e-kube-api-access-vj9nw\") pod \"heat-db-sync-747dz\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804318 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-combined-ca-bundle\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804371 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5zcj\" (UniqueName: \"kubernetes.io/projected/d93bd7f8-f1b8-4834-b2bc-f841295d35da-kube-api-access-k5zcj\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804415 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-config-data\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804482 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-config\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804510 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-fernet-keys\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804580 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-config-data\") pod \"heat-db-sync-747dz\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.804610 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpm2q\" (UniqueName: \"kubernetes.io/projected/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-kube-api-access-gpm2q\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc 
kubenswrapper[4935]: I1201 18:53:50.804633 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.805957 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.806895 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-config\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.807685 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.807946 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.816632 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-ts4v5" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.816835 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.817073 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.832222 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-jwvpw"] Dec 01 18:53:50 crc kubenswrapper[4935]: E1201 18:53:50.833462 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-k5zcj], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" podUID="d93bd7f8-f1b8-4834-b2bc-f841295d35da" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.844239 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-credential-keys\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.846693 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-8mddp"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.867207 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-c8448"] Dec 01 18:53:50 
crc kubenswrapper[4935]: I1201 18:53:50.878940 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-fernet-keys\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.879119 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-scripts\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.879554 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-config-data\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.885319 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5zcj\" (UniqueName: \"kubernetes.io/projected/d93bd7f8-f1b8-4834-b2bc-f841295d35da-kube-api-access-k5zcj\") pod \"dnsmasq-dns-5c9d85d47c-jwvpw\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.885577 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpm2q\" (UniqueName: \"kubernetes.io/projected/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-kube-api-access-gpm2q\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.898562 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b868669f-shz8s"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.901022 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.902715 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-combined-ca-bundle\") pod \"keystone-bootstrap-f5lxr\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.906308 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-config-data\") pod \"heat-db-sync-747dz\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.906372 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.906408 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-combined-ca-bundle\") pod \"neutron-db-sync-8mddp\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.906433 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8f827a2-a529-4371-8c82-c06377b2c9f2-etc-machine-id\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.906448 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-combined-ca-bundle\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.906465 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.906483 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.906488 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-combined-ca-bundle\") pod \"heat-db-sync-747dz\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.909769 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-scripts\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.909815 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-config\") pod \"neutron-db-sync-8mddp\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.909838 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khrkh\" (UniqueName: \"kubernetes.io/projected/10f9fc1e-d72b-4123-b805-82a03d56c439-kube-api-access-khrkh\") pod \"neutron-db-sync-8mddp\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.909859 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-config-data\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.909885 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj9nw\" (UniqueName: \"kubernetes.io/projected/7935698e-d40a-4c10-bf91-0a5d8855a09e-kube-api-access-vj9nw\") pod \"heat-db-sync-747dz\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.909950 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-config\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.909967 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-db-sync-config-data\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.910015 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8jg2\" (UniqueName: \"kubernetes.io/projected/e2221836-d40a-4233-81c3-115014596886-kube-api-access-b8jg2\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.910091 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpkrf\" (UniqueName: \"kubernetes.io/projected/b8f827a2-a529-4371-8c82-c06377b2c9f2-kube-api-access-lpkrf\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.910141 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.910280 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-svc\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.912177 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-combined-ca-bundle\") pod \"neutron-db-sync-8mddp\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.914090 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-shz8s"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.918085 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-combined-ca-bundle\") pod \"heat-db-sync-747dz\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.921947 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-config-data\") pod \"heat-db-sync-747dz\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.922056 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-config\") pod \"neutron-db-sync-8mddp\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.927980 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.934894 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-gk8gw"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.936629 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.938270 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.941204 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ffllp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.954840 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khrkh\" (UniqueName: \"kubernetes.io/projected/10f9fc1e-d72b-4123-b805-82a03d56c439-kube-api-access-khrkh\") pod \"neutron-db-sync-8mddp\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.956162 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj9nw\" (UniqueName: \"kubernetes.io/projected/7935698e-d40a-4c10-bf91-0a5d8855a09e-kube-api-access-vj9nw\") pod \"heat-db-sync-747dz\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " pod="openstack/heat-db-sync-747dz" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.966592 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-gk8gw"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.993613 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-bwph6"] Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.995054 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.998620 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.998799 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 01 18:53:50 crc kubenswrapper[4935]: I1201 18:53:50.999294 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rc6l5" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.008383 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bwph6"] Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.013799 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8jg2\" (UniqueName: \"kubernetes.io/projected/e2221836-d40a-4233-81c3-115014596886-kube-api-access-b8jg2\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.013867 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpkrf\" (UniqueName: \"kubernetes.io/projected/b8f827a2-a529-4371-8c82-c06377b2c9f2-kube-api-access-lpkrf\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.013904 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-combined-ca-bundle\") pod \"barbican-db-sync-gk8gw\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: 
I1201 18:53:51.013927 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.013947 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-scripts\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.013985 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnpps\" (UniqueName: \"kubernetes.io/projected/1e60c370-8ffd-4b97-a829-176da28bf116-kube-api-access-pnpps\") pod \"barbican-db-sync-gk8gw\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014021 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-svc\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014041 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae58d3fe-1a16-467c-b5c9-9522cb473a03-logs\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014063 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9xb4\" (UniqueName: \"kubernetes.io/projected/ae58d3fe-1a16-467c-b5c9-9522cb473a03-kube-api-access-m9xb4\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014095 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-combined-ca-bundle\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014115 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-db-sync-config-data\") pod \"barbican-db-sync-gk8gw\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014242 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 
18:53:51.014263 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-config-data\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014301 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8f827a2-a529-4371-8c82-c06377b2c9f2-etc-machine-id\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014316 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-combined-ca-bundle\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014333 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014352 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-scripts\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014379 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-config-data\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014408 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-config\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.014424 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-db-sync-config-data\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.015278 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.015836 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.015906 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8f827a2-a529-4371-8c82-c06377b2c9f2-etc-machine-id\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.016558 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-svc\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.017164 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.019103 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-config\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.026157 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-scripts\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.027532 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-combined-ca-bundle\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.028529 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-db-sync-config-data\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.031617 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-shz8s"] Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.032530 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-config-data\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.035867 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-747dz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.042111 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpkrf\" (UniqueName: \"kubernetes.io/projected/b8f827a2-a529-4371-8c82-c06377b2c9f2-kube-api-access-lpkrf\") pod \"cinder-db-sync-c8448\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.046915 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8jg2\" (UniqueName: \"kubernetes.io/projected/e2221836-d40a-4233-81c3-115014596886-kube-api-access-b8jg2\") pod \"dnsmasq-dns-5b868669f-shz8s\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.047597 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.086192 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-xdvvz"] Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.088061 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.102621 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-8mddp" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.116616 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-combined-ca-bundle\") pod \"barbican-db-sync-gk8gw\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.116661 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-scripts\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.116707 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnpps\" (UniqueName: \"kubernetes.io/projected/1e60c370-8ffd-4b97-a829-176da28bf116-kube-api-access-pnpps\") pod \"barbican-db-sync-gk8gw\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.116744 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae58d3fe-1a16-467c-b5c9-9522cb473a03-logs\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.116765 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9xb4\" (UniqueName: \"kubernetes.io/projected/ae58d3fe-1a16-467c-b5c9-9522cb473a03-kube-api-access-m9xb4\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.116786 4935 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-combined-ca-bundle\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.116804 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-db-sync-config-data\") pod \"barbican-db-sync-gk8gw\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.116850 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-config-data\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.117422 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae58d3fe-1a16-467c-b5c9-9522cb473a03-logs\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.118441 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-xdvvz"] Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.122939 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-scripts\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.123225 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-combined-ca-bundle\") pod \"barbican-db-sync-gk8gw\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.124690 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-combined-ca-bundle\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.136908 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-config-data\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.146866 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.149555 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnpps\" (UniqueName: \"kubernetes.io/projected/1e60c370-8ffd-4b97-a829-176da28bf116-kube-api-access-pnpps\") pod \"barbican-db-sync-gk8gw\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: 
I1201 18:53:51.162796 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9xb4\" (UniqueName: \"kubernetes.io/projected/ae58d3fe-1a16-467c-b5c9-9522cb473a03-kube-api-access-m9xb4\") pod \"placement-db-sync-bwph6\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.165281 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-db-sync-config-data\") pod \"barbican-db-sync-gk8gw\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.180698 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.183590 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.183888 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.192306 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.227944 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-svc\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.228072 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-config\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.228136 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.229029 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.229064 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.229623 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-628ws\" (UniqueName: \"kubernetes.io/projected/939e12e9-7833-49fe-93f7-8ea93afac15f-kube-api-access-628ws\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333536 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333585 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333661 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-run-httpd\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333690 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333728 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-config-data\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333754 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-log-httpd\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333786 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-628ws\" (UniqueName: \"kubernetes.io/projected/939e12e9-7833-49fe-93f7-8ea93afac15f-kube-api-access-628ws\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333804 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc5sd\" (UniqueName: \"kubernetes.io/projected/7392ddad-969f-4a42-86a1-460f3ca2d500-kube-api-access-sc5sd\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333833 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-svc\") pod 
\"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333881 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-config\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333901 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-scripts\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333935 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333933 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-c8448" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.333956 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.334761 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.335970 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-svc\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.340096 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-config\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.340340 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.342724 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-swift-storage-0\") 
pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.367909 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.372702 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bwph6" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.376957 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-628ws\" (UniqueName: \"kubernetes.io/projected/939e12e9-7833-49fe-93f7-8ea93afac15f-kube-api-access-628ws\") pod \"dnsmasq-dns-cf78879c9-xdvvz\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.423826 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.435682 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-log-httpd\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.435773 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc5sd\" (UniqueName: \"kubernetes.io/projected/7392ddad-969f-4a42-86a1-460f3ca2d500-kube-api-access-sc5sd\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.435864 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-scripts\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.435919 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.436068 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-run-httpd\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.436107 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.436177 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-config-data\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 
crc kubenswrapper[4935]: I1201 18:53:51.436974 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-log-httpd\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.438186 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-run-httpd\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.446176 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.449985 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-scripts\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.451538 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.459232 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d4ntp" event={"ID":"1a7ac48a-042f-4d13-a9ac-d8449e732bbf","Type":"ContainerStarted","Data":"485d989eb2289e695767dc43c6ac3f906250cd329a311eceabf214a7271ea533"} Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.459723 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.468852 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc5sd\" (UniqueName: \"kubernetes.io/projected/7392ddad-969f-4a42-86a1-460f3ca2d500-kube-api-access-sc5sd\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.487458 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-config-data\") pod \"ceilometer-0\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.508993 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.509223 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-d4ntp" podStartSLOduration=3.8787019369999998 podStartE2EDuration="36.509203127s" podCreationTimestamp="2025-12-01 18:53:15 +0000 UTC" firstStartedPulling="2025-12-01 18:53:16.583338006 +0000 UTC m=+1410.604967265" lastFinishedPulling="2025-12-01 18:53:49.213839196 +0000 UTC m=+1443.235468455" observedRunningTime="2025-12-01 18:53:51.48767323 +0000 UTC m=+1445.509302489" watchObservedRunningTime="2025-12-01 18:53:51.509203127 +0000 UTC m=+1445.530832386" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.606128 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.649190 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5zcj\" (UniqueName: \"kubernetes.io/projected/d93bd7f8-f1b8-4834-b2bc-f841295d35da-kube-api-access-k5zcj\") pod \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.649253 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-nb\") pod \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.649333 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-config\") pod \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.649445 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-dns-svc\") pod \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.649532 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-sb\") pod \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\" (UID: \"d93bd7f8-f1b8-4834-b2bc-f841295d35da\") " Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.651286 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-config" (OuterVolumeSpecName: "config") pod "d93bd7f8-f1b8-4834-b2bc-f841295d35da" (UID: "d93bd7f8-f1b8-4834-b2bc-f841295d35da"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.651362 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d93bd7f8-f1b8-4834-b2bc-f841295d35da" (UID: "d93bd7f8-f1b8-4834-b2bc-f841295d35da"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.651835 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d93bd7f8-f1b8-4834-b2bc-f841295d35da" (UID: "d93bd7f8-f1b8-4834-b2bc-f841295d35da"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.654446 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d93bd7f8-f1b8-4834-b2bc-f841295d35da" (UID: "d93bd7f8-f1b8-4834-b2bc-f841295d35da"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.721635 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-f5lxr"] Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.726334 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d93bd7f8-f1b8-4834-b2bc-f841295d35da-kube-api-access-k5zcj" (OuterVolumeSpecName: "kube-api-access-k5zcj") pod "d93bd7f8-f1b8-4834-b2bc-f841295d35da" (UID: "d93bd7f8-f1b8-4834-b2bc-f841295d35da"). InnerVolumeSpecName "kube-api-access-k5zcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.765374 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5zcj\" (UniqueName: \"kubernetes.io/projected/d93bd7f8-f1b8-4834-b2bc-f841295d35da-kube-api-access-k5zcj\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.765401 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.765412 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.765420 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.765432 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d93bd7f8-f1b8-4834-b2bc-f841295d35da-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.835492 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-shz8s"] Dec 01 18:53:51 crc kubenswrapper[4935]: I1201 18:53:51.844005 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-747dz"] Dec 01 18:53:52 crc kubenswrapper[4935]: W1201 18:53:52.013696 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2221836_d40a_4233_81c3_115014596886.slice/crio-d3db0415d2de5d77a26b20a49480c2a1e250dc188e7960db30aeebbc7d4e2af8 WatchSource:0}: Error finding container 
d3db0415d2de5d77a26b20a49480c2a1e250dc188e7960db30aeebbc7d4e2af8: Status 404 returned error can't find the container with id d3db0415d2de5d77a26b20a49480c2a1e250dc188e7960db30aeebbc7d4e2af8 Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.118568 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-8mddp"] Dec 01 18:53:52 crc kubenswrapper[4935]: W1201 18:53:52.135124 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10f9fc1e_d72b_4123_b805_82a03d56c439.slice/crio-eab7ee8343d6888b022bfc134bf41599d549098b309a88b685d1414464fb4d59 WatchSource:0}: Error finding container eab7ee8343d6888b022bfc134bf41599d549098b309a88b685d1414464fb4d59: Status 404 returned error can't find the container with id eab7ee8343d6888b022bfc134bf41599d549098b309a88b685d1414464fb4d59 Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.469984 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f5lxr" event={"ID":"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8","Type":"ContainerStarted","Data":"aa79d309c00063b791014c956e7c01e21618567781c2f2d0eaf568e0892ef0b8"} Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.472227 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-shz8s" event={"ID":"e2221836-d40a-4233-81c3-115014596886","Type":"ContainerStarted","Data":"d3db0415d2de5d77a26b20a49480c2a1e250dc188e7960db30aeebbc7d4e2af8"} Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.473636 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-747dz" event={"ID":"7935698e-d40a-4c10-bf91-0a5d8855a09e","Type":"ContainerStarted","Data":"05e45234abe61a069c8acb28a90ee080b797a0f06000eb1a2ef8974c4ee7ec7a"} Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.476131 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8mddp" event={"ID":"10f9fc1e-d72b-4123-b805-82a03d56c439","Type":"ContainerStarted","Data":"eab7ee8343d6888b022bfc134bf41599d549098b309a88b685d1414464fb4d59"} Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.476191 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-jwvpw" Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.553644 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-xdvvz"] Dec 01 18:53:52 crc kubenswrapper[4935]: W1201 18:53:52.556545 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod939e12e9_7833_49fe_93f7_8ea93afac15f.slice/crio-394bada363e378b00bba7c3679cc4c114c187f9b4d0fce535544e9df03d54686 WatchSource:0}: Error finding container 394bada363e378b00bba7c3679cc4c114c187f9b4d0fce535544e9df03d54686: Status 404 returned error can't find the container with id 394bada363e378b00bba7c3679cc4c114c187f9b4d0fce535544e9df03d54686 Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.575227 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-c8448"] Dec 01 18:53:52 crc kubenswrapper[4935]: W1201 18:53:52.587341 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae58d3fe_1a16_467c_b5c9_9522cb473a03.slice/crio-262e1a3fa876da77966e5b8a8ff2e47df03bd612154889550ca3787d143a3598 WatchSource:0}: Error finding container 262e1a3fa876da77966e5b8a8ff2e47df03bd612154889550ca3787d143a3598: Status 404 returned error can't find the container with id 262e1a3fa876da77966e5b8a8ff2e47df03bd612154889550ca3787d143a3598 Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.590448 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bwph6"] Dec 01 18:53:52 crc kubenswrapper[4935]: W1201 18:53:52.604647 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e60c370_8ffd_4b97_a829_176da28bf116.slice/crio-b250bb0eb77d66ac3f75efd9998c353f6cb24dd3d97d9dc02a8bdda79bfec4cf WatchSource:0}: Error finding container b250bb0eb77d66ac3f75efd9998c353f6cb24dd3d97d9dc02a8bdda79bfec4cf: Status 404 returned error can't find the container with id b250bb0eb77d66ac3f75efd9998c353f6cb24dd3d97d9dc02a8bdda79bfec4cf Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.608591 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.620272 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-gk8gw"] Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.685845 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.754255 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-swfxc"] Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.812620 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-jwvpw"] Dec 01 18:53:52 crc kubenswrapper[4935]: I1201 18:53:52.818138 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-jwvpw"] Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.492600 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-gk8gw" event={"ID":"1e60c370-8ffd-4b97-a829-176da28bf116","Type":"ContainerStarted","Data":"b250bb0eb77d66ac3f75efd9998c353f6cb24dd3d97d9dc02a8bdda79bfec4cf"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.507824 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"236b4030-ceae-4159-b2c9-beb3b4eca661","Type":"ContainerStarted","Data":"dd7c0364cbb5f64511dd84926707e1b5f205ee4946aec7112eea1df8ec599d98"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.507906 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"236b4030-ceae-4159-b2c9-beb3b4eca661","Type":"ContainerStarted","Data":"6faf7bc96efc63029d68e90b599cf27df99ab0dfa759ef0bccab0a7baf9235ca"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.511062 4935 generic.go:334] "Generic (PLEG): container finished" podID="939e12e9-7833-49fe-93f7-8ea93afac15f" containerID="419f225fdf81cb309cd729c2228b0cd4678af7f74d70ee306231fa140be115a4" exitCode=0 Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.511931 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" event={"ID":"939e12e9-7833-49fe-93f7-8ea93afac15f","Type":"ContainerDied","Data":"419f225fdf81cb309cd729c2228b0cd4678af7f74d70ee306231fa140be115a4"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.512105 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" event={"ID":"939e12e9-7833-49fe-93f7-8ea93afac15f","Type":"ContainerStarted","Data":"394bada363e378b00bba7c3679cc4c114c187f9b4d0fce535544e9df03d54686"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.528460 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f5lxr" event={"ID":"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8","Type":"ContainerStarted","Data":"1f6e0bda07250babfb8ad978d9a7ce82f21dedc1cee57203b4539898d7ccbb24"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.534290 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bwph6" event={"ID":"ae58d3fe-1a16-467c-b5c9-9522cb473a03","Type":"ContainerStarted","Data":"262e1a3fa876da77966e5b8a8ff2e47df03bd612154889550ca3787d143a3598"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.546325 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-c8448" event={"ID":"b8f827a2-a529-4371-8c82-c06377b2c9f2","Type":"ContainerStarted","Data":"621035122a958e7d1058fa378aff571c1f561a2df5397d179bf6aa9f07c69e6f"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.561265 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=16.561241747 podStartE2EDuration="16.561241747s" podCreationTimestamp="2025-12-01 18:53:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:53.543372842 +0000 UTC m=+1447.565002111" watchObservedRunningTime="2025-12-01 18:53:53.561241747 +0000 UTC m=+1447.582871006" Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.569022 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerStarted","Data":"9e6b292b838cdc8e7350b1a12336bb416154917bc4b6477380d2a22dc1b1859b"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.571409 4935 generic.go:334] "Generic (PLEG): container finished" podID="e2221836-d40a-4233-81c3-115014596886" containerID="4d285b1d9d8a463a86b8ed9580384802c46da4c8025e0644608d1f7a5f0bf0a5" exitCode=0 Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 
18:53:53.571450 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-shz8s" event={"ID":"e2221836-d40a-4233-81c3-115014596886","Type":"ContainerDied","Data":"4d285b1d9d8a463a86b8ed9580384802c46da4c8025e0644608d1f7a5f0bf0a5"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.584048 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-swfxc" podUID="4336b794-da9d-464c-87f0-e22f1041f630" containerName="registry-server" containerID="cri-o://55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930" gracePeriod=2 Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.585340 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8mddp" event={"ID":"10f9fc1e-d72b-4123-b805-82a03d56c439","Type":"ContainerStarted","Data":"8e07534970642545d9945301261beac2ca2b1894d54971fca61311cdd0d03f4a"} Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.647816 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-f5lxr" podStartSLOduration=3.64779651 podStartE2EDuration="3.64779651s" podCreationTimestamp="2025-12-01 18:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:53.595029654 +0000 UTC m=+1447.616658923" watchObservedRunningTime="2025-12-01 18:53:53.64779651 +0000 UTC m=+1447.669425769" Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.679872 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-8mddp" podStartSLOduration=3.679852174 podStartE2EDuration="3.679852174s" podCreationTimestamp="2025-12-01 18:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:53.663696592 +0000 UTC m=+1447.685325851" watchObservedRunningTime="2025-12-01 18:53:53.679852174 +0000 UTC m=+1447.701481433" Dec 01 18:53:53 crc kubenswrapper[4935]: I1201 18:53:53.998383 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.128023 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-config\") pod \"e2221836-d40a-4233-81c3-115014596886\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.128339 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-swift-storage-0\") pod \"e2221836-d40a-4233-81c3-115014596886\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.128429 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-nb\") pod \"e2221836-d40a-4233-81c3-115014596886\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.128453 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-svc\") pod \"e2221836-d40a-4233-81c3-115014596886\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.128895 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8jg2\" (UniqueName: \"kubernetes.io/projected/e2221836-d40a-4233-81c3-115014596886-kube-api-access-b8jg2\") pod \"e2221836-d40a-4233-81c3-115014596886\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.128965 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-sb\") pod \"e2221836-d40a-4233-81c3-115014596886\" (UID: \"e2221836-d40a-4233-81c3-115014596886\") " Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.144895 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2221836-d40a-4233-81c3-115014596886-kube-api-access-b8jg2" (OuterVolumeSpecName: "kube-api-access-b8jg2") pod "e2221836-d40a-4233-81c3-115014596886" (UID: "e2221836-d40a-4233-81c3-115014596886"). InnerVolumeSpecName "kube-api-access-b8jg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.173730 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e2221836-d40a-4233-81c3-115014596886" (UID: "e2221836-d40a-4233-81c3-115014596886"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.176218 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-config" (OuterVolumeSpecName: "config") pod "e2221836-d40a-4233-81c3-115014596886" (UID: "e2221836-d40a-4233-81c3-115014596886"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.176340 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e2221836-d40a-4233-81c3-115014596886" (UID: "e2221836-d40a-4233-81c3-115014596886"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.190588 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e2221836-d40a-4233-81c3-115014596886" (UID: "e2221836-d40a-4233-81c3-115014596886"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.194548 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.213966 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e2221836-d40a-4233-81c3-115014596886" (UID: "e2221836-d40a-4233-81c3-115014596886"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.215617 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.231867 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8jg2\" (UniqueName: \"kubernetes.io/projected/e2221836-d40a-4233-81c3-115014596886-kube-api-access-b8jg2\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.231896 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.231928 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.231937 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.231946 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.231955 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e2221836-d40a-4233-81c3-115014596886-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.333346 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58jxt\" (UniqueName: 
\"kubernetes.io/projected/4336b794-da9d-464c-87f0-e22f1041f630-kube-api-access-58jxt\") pod \"4336b794-da9d-464c-87f0-e22f1041f630\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.333470 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-utilities\") pod \"4336b794-da9d-464c-87f0-e22f1041f630\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.333697 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-catalog-content\") pod \"4336b794-da9d-464c-87f0-e22f1041f630\" (UID: \"4336b794-da9d-464c-87f0-e22f1041f630\") " Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.336934 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-utilities" (OuterVolumeSpecName: "utilities") pod "4336b794-da9d-464c-87f0-e22f1041f630" (UID: "4336b794-da9d-464c-87f0-e22f1041f630"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.340479 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4336b794-da9d-464c-87f0-e22f1041f630-kube-api-access-58jxt" (OuterVolumeSpecName: "kube-api-access-58jxt") pod "4336b794-da9d-464c-87f0-e22f1041f630" (UID: "4336b794-da9d-464c-87f0-e22f1041f630"). InnerVolumeSpecName "kube-api-access-58jxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.354072 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4336b794-da9d-464c-87f0-e22f1041f630" (UID: "4336b794-da9d-464c-87f0-e22f1041f630"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.436441 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.436475 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336b794-da9d-464c-87f0-e22f1041f630-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.436488 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58jxt\" (UniqueName: \"kubernetes.io/projected/4336b794-da9d-464c-87f0-e22f1041f630-kube-api-access-58jxt\") on node \"crc\" DevicePath \"\"" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.540775 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d93bd7f8-f1b8-4834-b2bc-f841295d35da" path="/var/lib/kubelet/pods/d93bd7f8-f1b8-4834-b2bc-f841295d35da/volumes" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.619735 4935 generic.go:334] "Generic (PLEG): container finished" podID="4336b794-da9d-464c-87f0-e22f1041f630" containerID="55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930" exitCode=0 Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.619808 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-swfxc" event={"ID":"4336b794-da9d-464c-87f0-e22f1041f630","Type":"ContainerDied","Data":"55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930"} Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.619841 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-swfxc" event={"ID":"4336b794-da9d-464c-87f0-e22f1041f630","Type":"ContainerDied","Data":"d05669b5ecdba31f6469b87cdf0083ad37ada6babd0fecb316c12b4eb0e36d86"} Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.619861 4935 scope.go:117] "RemoveContainer" containerID="55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.619992 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-swfxc" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.624229 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-shz8s" event={"ID":"e2221836-d40a-4233-81c3-115014596886","Type":"ContainerDied","Data":"d3db0415d2de5d77a26b20a49480c2a1e250dc188e7960db30aeebbc7d4e2af8"} Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.624303 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-shz8s" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.629622 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" event={"ID":"939e12e9-7833-49fe-93f7-8ea93afac15f","Type":"ContainerStarted","Data":"232e05af677a9738c1d83fb6168636f74bf6c21877e70bb5d87f2ad805516289"} Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.631043 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.664485 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-swfxc"] Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.666040 4935 scope.go:117] "RemoveContainer" containerID="9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.681214 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-swfxc"] Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.748326 4935 scope.go:117] "RemoveContainer" containerID="a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.752000 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" podStartSLOduration=3.751976588 podStartE2EDuration="3.751976588s" podCreationTimestamp="2025-12-01 18:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:53:54.661999398 +0000 UTC m=+1448.683628657" watchObservedRunningTime="2025-12-01 18:53:54.751976588 +0000 UTC m=+1448.773605847" Dec 01 18:53:54 crc kubenswrapper[4935]: E1201 18:53:54.757895 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4336b794_da9d_464c_87f0_e22f1041f630.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2221836_d40a_4233_81c3_115014596886.slice\": RecentStats: unable to find data in memory cache]" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.787891 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-shz8s"] Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.806289 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-shz8s"] Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.834415 4935 scope.go:117] "RemoveContainer" containerID="55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930" Dec 01 18:53:54 crc kubenswrapper[4935]: E1201 18:53:54.838303 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930\": container with ID starting with 55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930 not found: ID does not exist" containerID="55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.838344 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930"} err="failed to get 
container status \"55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930\": rpc error: code = NotFound desc = could not find container \"55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930\": container with ID starting with 55ca22642370ed33bfb04d34121aa1a9b760e78184c19c3511b4066434597930 not found: ID does not exist" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.838389 4935 scope.go:117] "RemoveContainer" containerID="9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df" Dec 01 18:53:54 crc kubenswrapper[4935]: E1201 18:53:54.842285 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df\": container with ID starting with 9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df not found: ID does not exist" containerID="9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.842319 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df"} err="failed to get container status \"9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df\": rpc error: code = NotFound desc = could not find container \"9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df\": container with ID starting with 9fb137832949a3faababfb9a00c431c02cf0ee184814e930da46648b071b30df not found: ID does not exist" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.842342 4935 scope.go:117] "RemoveContainer" containerID="a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64" Dec 01 18:53:54 crc kubenswrapper[4935]: E1201 18:53:54.844068 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64\": container with ID starting with a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64 not found: ID does not exist" containerID="a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.844110 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64"} err="failed to get container status \"a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64\": rpc error: code = NotFound desc = could not find container \"a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64\": container with ID starting with a5c6e713594cadb4c3a9fd1af5d272a6fa695461cae3d89f29aa090f727d0d64 not found: ID does not exist" Dec 01 18:53:54 crc kubenswrapper[4935]: I1201 18:53:54.844291 4935 scope.go:117] "RemoveContainer" containerID="4d285b1d9d8a463a86b8ed9580384802c46da4c8025e0644608d1f7a5f0bf0a5" Dec 01 18:53:56 crc kubenswrapper[4935]: I1201 18:53:56.535956 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4336b794-da9d-464c-87f0-e22f1041f630" path="/var/lib/kubelet/pods/4336b794-da9d-464c-87f0-e22f1041f630/volumes" Dec 01 18:53:56 crc kubenswrapper[4935]: I1201 18:53:56.537900 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2221836-d40a-4233-81c3-115014596886" path="/var/lib/kubelet/pods/e2221836-d40a-4233-81c3-115014596886/volumes" Dec 01 18:53:57 crc kubenswrapper[4935]: I1201 
18:53:57.574389 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 01 18:53:57 crc kubenswrapper[4935]: I1201 18:53:57.675309 4935 generic.go:334] "Generic (PLEG): container finished" podID="0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" containerID="1f6e0bda07250babfb8ad978d9a7ce82f21dedc1cee57203b4539898d7ccbb24" exitCode=0 Dec 01 18:53:57 crc kubenswrapper[4935]: I1201 18:53:57.675377 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f5lxr" event={"ID":"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8","Type":"ContainerDied","Data":"1f6e0bda07250babfb8ad978d9a7ce82f21dedc1cee57203b4539898d7ccbb24"} Dec 01 18:54:01 crc kubenswrapper[4935]: I1201 18:54:01.426418 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:54:01 crc kubenswrapper[4935]: I1201 18:54:01.553629 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-cjlh4"] Dec 01 18:54:01 crc kubenswrapper[4935]: I1201 18:54:01.554074 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="dnsmasq-dns" containerID="cri-o://f935a6d4eb7ad0ab42ad4156cf94aa42adaf17fb576bc8ae7aec2992225e7a41" gracePeriod=10 Dec 01 18:54:02 crc kubenswrapper[4935]: I1201 18:54:02.261844 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: connect: connection refused" Dec 01 18:54:02 crc kubenswrapper[4935]: I1201 18:54:02.752506 4935 generic.go:334] "Generic (PLEG): container finished" podID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerID="f935a6d4eb7ad0ab42ad4156cf94aa42adaf17fb576bc8ae7aec2992225e7a41" exitCode=0 Dec 01 18:54:02 crc kubenswrapper[4935]: I1201 18:54:02.752558 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" event={"ID":"5f14ba90-8960-4a1c-b3fb-f740cb4a6650","Type":"ContainerDied","Data":"f935a6d4eb7ad0ab42ad4156cf94aa42adaf17fb576bc8ae7aec2992225e7a41"} Dec 01 18:54:07 crc kubenswrapper[4935]: I1201 18:54:07.574130 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 01 18:54:07 crc kubenswrapper[4935]: I1201 18:54:07.581443 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 01 18:54:07 crc kubenswrapper[4935]: I1201 18:54:07.818001 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.591427 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nbbbd"] Dec 01 18:54:08 crc kubenswrapper[4935]: E1201 18:54:08.592312 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4336b794-da9d-464c-87f0-e22f1041f630" containerName="extract-content" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.592332 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4336b794-da9d-464c-87f0-e22f1041f630" containerName="extract-content" Dec 01 18:54:08 crc kubenswrapper[4935]: E1201 18:54:08.592349 4935 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4336b794-da9d-464c-87f0-e22f1041f630" containerName="registry-server" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.592357 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4336b794-da9d-464c-87f0-e22f1041f630" containerName="registry-server" Dec 01 18:54:08 crc kubenswrapper[4935]: E1201 18:54:08.592407 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2221836-d40a-4233-81c3-115014596886" containerName="init" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.592416 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2221836-d40a-4233-81c3-115014596886" containerName="init" Dec 01 18:54:08 crc kubenswrapper[4935]: E1201 18:54:08.592435 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4336b794-da9d-464c-87f0-e22f1041f630" containerName="extract-utilities" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.592443 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4336b794-da9d-464c-87f0-e22f1041f630" containerName="extract-utilities" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.592661 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="4336b794-da9d-464c-87f0-e22f1041f630" containerName="registry-server" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.592676 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2221836-d40a-4233-81c3-115014596886" containerName="init" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.594597 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.606763 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbbbd"] Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.705978 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n9p4\" (UniqueName: \"kubernetes.io/projected/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-kube-api-access-6n9p4\") pod \"redhat-operators-nbbbd\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.706552 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-catalog-content\") pod \"redhat-operators-nbbbd\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.706620 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-utilities\") pod \"redhat-operators-nbbbd\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.808460 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n9p4\" (UniqueName: \"kubernetes.io/projected/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-kube-api-access-6n9p4\") pod \"redhat-operators-nbbbd\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.808546 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-catalog-content\") pod \"redhat-operators-nbbbd\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.808591 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-utilities\") pod \"redhat-operators-nbbbd\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.809030 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-utilities\") pod \"redhat-operators-nbbbd\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.809049 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-catalog-content\") pod \"redhat-operators-nbbbd\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.836976 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n9p4\" (UniqueName: \"kubernetes.io/projected/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-kube-api-access-6n9p4\") pod \"redhat-operators-nbbbd\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:08 crc kubenswrapper[4935]: I1201 18:54:08.933778 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:09 crc kubenswrapper[4935]: I1201 18:54:09.841823 4935 generic.go:334] "Generic (PLEG): container finished" podID="1a7ac48a-042f-4d13-a9ac-d8449e732bbf" containerID="485d989eb2289e695767dc43c6ac3f906250cd329a311eceabf214a7271ea533" exitCode=0 Dec 01 18:54:09 crc kubenswrapper[4935]: I1201 18:54:09.841872 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d4ntp" event={"ID":"1a7ac48a-042f-4d13-a9ac-d8449e732bbf","Type":"ContainerDied","Data":"485d989eb2289e695767dc43c6ac3f906250cd329a311eceabf214a7271ea533"} Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.689844 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.853325 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-config-data\") pod \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.853424 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpm2q\" (UniqueName: \"kubernetes.io/projected/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-kube-api-access-gpm2q\") pod \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.853448 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-fernet-keys\") pod \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.853494 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-scripts\") pod \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.853522 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-credential-keys\") pod \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.853571 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-combined-ca-bundle\") pod \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\" (UID: \"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8\") " Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.860512 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f5lxr" event={"ID":"0a45e802-f6bc-4a6f-8a7e-c3173d4773e8","Type":"ContainerDied","Data":"aa79d309c00063b791014c956e7c01e21618567781c2f2d0eaf568e0892ef0b8"} Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.860575 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa79d309c00063b791014c956e7c01e21618567781c2f2d0eaf568e0892ef0b8" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.860610 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-f5lxr" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.876404 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-scripts" (OuterVolumeSpecName: "scripts") pod "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" (UID: "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.876955 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-kube-api-access-gpm2q" (OuterVolumeSpecName: "kube-api-access-gpm2q") pod "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" (UID: "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8"). InnerVolumeSpecName "kube-api-access-gpm2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.881112 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" (UID: "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.881362 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" (UID: "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.891299 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-config-data" (OuterVolumeSpecName: "config-data") pod "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" (UID: "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.898412 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" (UID: "0a45e802-f6bc-4a6f-8a7e-c3173d4773e8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.957654 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.957692 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.957702 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpm2q\" (UniqueName: \"kubernetes.io/projected/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-kube-api-access-gpm2q\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.957714 4935 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.957723 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:10 crc kubenswrapper[4935]: I1201 18:54:10.957732 4935 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.803164 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-f5lxr"] Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.817257 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-f5lxr"] Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.938080 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-bb44q"] Dec 01 18:54:11 crc kubenswrapper[4935]: E1201 18:54:11.938656 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" containerName="keystone-bootstrap" Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.938671 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" containerName="keystone-bootstrap" Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.938910 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" containerName="keystone-bootstrap" Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.939783 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.952114 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.952427 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.952612 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.960973 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 18:54:11 crc kubenswrapper[4935]: I1201 18:54:11.963096 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9jnc7" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.080500 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bb44q"] Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.098364 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-scripts\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.098437 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn5tn\" (UniqueName: \"kubernetes.io/projected/56e181ed-d603-4a53-aa57-222d888ce5a2-kube-api-access-wn5tn\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.098544 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-credential-keys\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.098606 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-config-data\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.098629 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-fernet-keys\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.098649 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-combined-ca-bundle\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.200288 4935 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-combined-ca-bundle\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.200476 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-scripts\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.200553 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn5tn\" (UniqueName: \"kubernetes.io/projected/56e181ed-d603-4a53-aa57-222d888ce5a2-kube-api-access-wn5tn\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.200603 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-credential-keys\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.201118 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-config-data\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.201177 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-fernet-keys\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.209946 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-scripts\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.210095 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-credential-keys\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.210258 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-config-data\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.210563 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-fernet-keys\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " 
pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.215280 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-combined-ca-bundle\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.233935 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wn5tn\" (UniqueName: \"kubernetes.io/projected/56e181ed-d603-4a53-aa57-222d888ce5a2-kube-api-access-wn5tn\") pod \"keystone-bootstrap-bb44q\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.261182 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: i/o timeout" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.278786 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:12 crc kubenswrapper[4935]: I1201 18:54:12.525889 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a45e802-f6bc-4a6f-8a7e-c3173d4773e8" path="/var/lib/kubelet/pods/0a45e802-f6bc-4a6f-8a7e-c3173d4773e8/volumes" Dec 01 18:54:17 crc kubenswrapper[4935]: I1201 18:54:17.261450 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: i/o timeout" Dec 01 18:54:17 crc kubenswrapper[4935]: I1201 18:54:17.262198 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:54:18 crc kubenswrapper[4935]: I1201 18:54:18.982571 4935 generic.go:334] "Generic (PLEG): container finished" podID="10f9fc1e-d72b-4123-b805-82a03d56c439" containerID="8e07534970642545d9945301261beac2ca2b1894d54971fca61311cdd0d03f4a" exitCode=0 Dec 01 18:54:18 crc kubenswrapper[4935]: I1201 18:54:18.982696 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8mddp" event={"ID":"10f9fc1e-d72b-4123-b805-82a03d56c439","Type":"ContainerDied","Data":"8e07534970642545d9945301261beac2ca2b1894d54971fca61311cdd0d03f4a"} Dec 01 18:54:22 crc kubenswrapper[4935]: I1201 18:54:22.262280 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: i/o timeout" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.588715 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.680858 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-sb\") pod \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.681010 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-nb\") pod \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.681044 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-config\") pod \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.681087 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvjrw\" (UniqueName: \"kubernetes.io/projected/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-kube-api-access-nvjrw\") pod \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.681156 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-dns-svc\") pod \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\" (UID: \"5f14ba90-8960-4a1c-b3fb-f740cb4a6650\") " Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.691989 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-kube-api-access-nvjrw" (OuterVolumeSpecName: "kube-api-access-nvjrw") pod "5f14ba90-8960-4a1c-b3fb-f740cb4a6650" (UID: "5f14ba90-8960-4a1c-b3fb-f740cb4a6650"). InnerVolumeSpecName "kube-api-access-nvjrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.733427 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-config" (OuterVolumeSpecName: "config") pod "5f14ba90-8960-4a1c-b3fb-f740cb4a6650" (UID: "5f14ba90-8960-4a1c-b3fb-f740cb4a6650"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.739665 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5f14ba90-8960-4a1c-b3fb-f740cb4a6650" (UID: "5f14ba90-8960-4a1c-b3fb-f740cb4a6650"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.740787 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5f14ba90-8960-4a1c-b3fb-f740cb4a6650" (UID: "5f14ba90-8960-4a1c-b3fb-f740cb4a6650"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.741843 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5f14ba90-8960-4a1c-b3fb-f740cb4a6650" (UID: "5f14ba90-8960-4a1c-b3fb-f740cb4a6650"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.784213 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.784247 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.784258 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.784268 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvjrw\" (UniqueName: \"kubernetes.io/projected/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-kube-api-access-nvjrw\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:23 crc kubenswrapper[4935]: I1201 18:54:23.784278 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f14ba90-8960-4a1c-b3fb-f740cb4a6650-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:23 crc kubenswrapper[4935]: E1201 18:54:23.887071 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified" Dec 01 18:54:23 crc kubenswrapper[4935]: E1201 18:54:23.887287 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:heat-db-sync,Image:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,Command:[/bin/bash],Args:[-c /usr/bin/heat-manage --config-dir /etc/heat/heat.conf.d 
db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/heat/heat.conf.d/00-default.conf,SubPath:00-default.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/heat/heat.conf.d/01-custom.conf,SubPath:01-custom.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vj9nw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42418,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42418,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-db-sync-747dz_openstack(7935698e-d40a-4c10-bf91-0a5d8855a09e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:54:23 crc kubenswrapper[4935]: E1201 18:54:23.888584 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/heat-db-sync-747dz" podUID="7935698e-d40a-4c10-bf91-0a5d8855a09e" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.088309 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" event={"ID":"5f14ba90-8960-4a1c-b3fb-f740cb4a6650","Type":"ContainerDied","Data":"2d8c481197013e5a651dff90b06ed0407f1287c7504cdd78ed8dbe4aabd651d1"} Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.088368 4935 scope.go:117] "RemoveContainer" containerID="f935a6d4eb7ad0ab42ad4156cf94aa42adaf17fb576bc8ae7aec2992225e7a41" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.088534 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" Dec 01 18:54:24 crc kubenswrapper[4935]: E1201 18:54:24.090105 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified\\\"\"" pod="openstack/heat-db-sync-747dz" podUID="7935698e-d40a-4c10-bf91-0a5d8855a09e" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.145026 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-cjlh4"] Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.153833 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-cjlh4"] Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.404435 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-8mddp" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.408984 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-d4ntp" Dec 01 18:54:24 crc kubenswrapper[4935]: E1201 18:54:24.455872 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 01 18:54:24 crc kubenswrapper[4935]: E1201 18:54:24.458242 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pnpps,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-gk8gw_openstack(1e60c370-8ffd-4b97-a829-176da28bf116): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:54:24 crc kubenswrapper[4935]: E1201 18:54:24.464388 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" 
with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-gk8gw" podUID="1e60c370-8ffd-4b97-a829-176da28bf116" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.499377 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-config\") pod \"10f9fc1e-d72b-4123-b805-82a03d56c439\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.499560 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-config-data\") pod \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.500665 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-db-sync-config-data\") pod \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.500706 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khrkh\" (UniqueName: \"kubernetes.io/projected/10f9fc1e-d72b-4123-b805-82a03d56c439-kube-api-access-khrkh\") pod \"10f9fc1e-d72b-4123-b805-82a03d56c439\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.500844 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-combined-ca-bundle\") pod \"10f9fc1e-d72b-4123-b805-82a03d56c439\" (UID: \"10f9fc1e-d72b-4123-b805-82a03d56c439\") " Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.500874 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-combined-ca-bundle\") pod \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.500926 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8scb\" (UniqueName: \"kubernetes.io/projected/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-kube-api-access-n8scb\") pod \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\" (UID: \"1a7ac48a-042f-4d13-a9ac-d8449e732bbf\") " Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.508502 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1a7ac48a-042f-4d13-a9ac-d8449e732bbf" (UID: "1a7ac48a-042f-4d13-a9ac-d8449e732bbf"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.517672 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-kube-api-access-n8scb" (OuterVolumeSpecName: "kube-api-access-n8scb") pod "1a7ac48a-042f-4d13-a9ac-d8449e732bbf" (UID: "1a7ac48a-042f-4d13-a9ac-d8449e732bbf"). InnerVolumeSpecName "kube-api-access-n8scb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.522330 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" path="/var/lib/kubelet/pods/5f14ba90-8960-4a1c-b3fb-f740cb4a6650/volumes" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.522321 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10f9fc1e-d72b-4123-b805-82a03d56c439-kube-api-access-khrkh" (OuterVolumeSpecName: "kube-api-access-khrkh") pod "10f9fc1e-d72b-4123-b805-82a03d56c439" (UID: "10f9fc1e-d72b-4123-b805-82a03d56c439"). InnerVolumeSpecName "kube-api-access-khrkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.532647 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10f9fc1e-d72b-4123-b805-82a03d56c439" (UID: "10f9fc1e-d72b-4123-b805-82a03d56c439"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.534290 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-config" (OuterVolumeSpecName: "config") pod "10f9fc1e-d72b-4123-b805-82a03d56c439" (UID: "10f9fc1e-d72b-4123-b805-82a03d56c439"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.543412 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a7ac48a-042f-4d13-a9ac-d8449e732bbf" (UID: "1a7ac48a-042f-4d13-a9ac-d8449e732bbf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.573190 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-config-data" (OuterVolumeSpecName: "config-data") pod "1a7ac48a-042f-4d13-a9ac-d8449e732bbf" (UID: "1a7ac48a-042f-4d13-a9ac-d8449e732bbf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.605664 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.605704 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.605717 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8scb\" (UniqueName: \"kubernetes.io/projected/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-kube-api-access-n8scb\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.605732 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/10f9fc1e-d72b-4123-b805-82a03d56c439-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.605746 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.605757 4935 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a7ac48a-042f-4d13-a9ac-d8449e732bbf-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:24 crc kubenswrapper[4935]: I1201 18:54:24.605768 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khrkh\" (UniqueName: \"kubernetes.io/projected/10f9fc1e-d72b-4123-b805-82a03d56c439-kube-api-access-khrkh\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.110129 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d4ntp" event={"ID":"1a7ac48a-042f-4d13-a9ac-d8449e732bbf","Type":"ContainerDied","Data":"533a054e887b2c51e54f1df125742c43f1355eb6f20c9e2400bea75c77e87fa5"} Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.110224 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="533a054e887b2c51e54f1df125742c43f1355eb6f20c9e2400bea75c77e87fa5" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.110368 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-d4ntp" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.121968 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8mddp" event={"ID":"10f9fc1e-d72b-4123-b805-82a03d56c439","Type":"ContainerDied","Data":"eab7ee8343d6888b022bfc134bf41599d549098b309a88b685d1414464fb4d59"} Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.122024 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eab7ee8343d6888b022bfc134bf41599d549098b309a88b685d1414464fb4d59" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.122687 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-8mddp" Dec 01 18:54:25 crc kubenswrapper[4935]: E1201 18:54:25.123447 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-gk8gw" podUID="1e60c370-8ffd-4b97-a829-176da28bf116" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.631763 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79cd4f6685-mvbmf"] Dec 01 18:54:25 crc kubenswrapper[4935]: E1201 18:54:25.633610 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="init" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.633631 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="init" Dec 01 18:54:25 crc kubenswrapper[4935]: E1201 18:54:25.633647 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a7ac48a-042f-4d13-a9ac-d8449e732bbf" containerName="glance-db-sync" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.633653 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a7ac48a-042f-4d13-a9ac-d8449e732bbf" containerName="glance-db-sync" Dec 01 18:54:25 crc kubenswrapper[4935]: E1201 18:54:25.633675 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="dnsmasq-dns" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.633680 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="dnsmasq-dns" Dec 01 18:54:25 crc kubenswrapper[4935]: E1201 18:54:25.633704 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10f9fc1e-d72b-4123-b805-82a03d56c439" containerName="neutron-db-sync" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.633710 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="10f9fc1e-d72b-4123-b805-82a03d56c439" containerName="neutron-db-sync" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.633911 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="10f9fc1e-d72b-4123-b805-82a03d56c439" containerName="neutron-db-sync" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.633941 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a7ac48a-042f-4d13-a9ac-d8449e732bbf" containerName="glance-db-sync" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.633952 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="dnsmasq-dns" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.635906 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.657889 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79cd4f6685-mvbmf"] Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.740400 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-swift-storage-0\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.740451 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cfl9\" (UniqueName: \"kubernetes.io/projected/9c07307f-5812-4ba6-bf01-56e22121c34d-kube-api-access-6cfl9\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.740477 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-nb\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.740493 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-sb\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.740554 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-config\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.740653 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-svc\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.748675 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5f87769df8-nbb75"] Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.751355 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.755139 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.755225 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.755346 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.755483 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-kbcjk" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.767132 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f87769df8-nbb75"] Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842403 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-svc\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842468 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-combined-ca-bundle\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842499 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-swift-storage-0\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842529 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cfl9\" (UniqueName: \"kubernetes.io/projected/9c07307f-5812-4ba6-bf01-56e22121c34d-kube-api-access-6cfl9\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842554 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-nb\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842572 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-sb\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842652 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-config\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: 
\"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842722 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-httpd-config\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842739 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-config\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842817 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vhdz\" (UniqueName: \"kubernetes.io/projected/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-kube-api-access-6vhdz\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.842842 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-ovndb-tls-certs\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.843666 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-svc\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.844224 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-sb\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.844275 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-config\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.844947 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-nb\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.845048 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-swift-storage-0\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " 
pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.869031 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cfl9\" (UniqueName: \"kubernetes.io/projected/9c07307f-5812-4ba6-bf01-56e22121c34d-kube-api-access-6cfl9\") pod \"dnsmasq-dns-79cd4f6685-mvbmf\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.893914 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79cd4f6685-mvbmf"] Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.895113 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.942251 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-cv8dw"] Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.944740 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.945942 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-combined-ca-bundle\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.946104 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-httpd-config\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.946128 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-config\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.946382 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vhdz\" (UniqueName: \"kubernetes.io/projected/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-kube-api-access-6vhdz\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.946401 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-ovndb-tls-certs\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.950909 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-httpd-config\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.952795 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-config\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.964762 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-cv8dw"] Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.968938 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-combined-ca-bundle\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.970315 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-ovndb-tls-certs\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:25 crc kubenswrapper[4935]: I1201 18:54:25.973807 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vhdz\" (UniqueName: \"kubernetes.io/projected/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-kube-api-access-6vhdz\") pod \"neutron-5f87769df8-nbb75\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.048131 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbrs9\" (UniqueName: \"kubernetes.io/projected/b830d4fa-99f6-4f0b-9220-75cf3170a78c-kube-api-access-fbrs9\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.048234 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-config\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.048356 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-svc\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.048444 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.048862 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc 
kubenswrapper[4935]: I1201 18:54:26.048906 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.078894 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.150822 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.150927 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.150953 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.151009 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbrs9\" (UniqueName: \"kubernetes.io/projected/b830d4fa-99f6-4f0b-9220-75cf3170a78c-kube-api-access-fbrs9\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.151061 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-config\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.151135 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-svc\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.151695 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.152084 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-svc\") pod 
\"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.152448 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.152467 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.152877 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-config\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.168887 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbrs9\" (UniqueName: \"kubernetes.io/projected/b830d4fa-99f6-4f0b-9220-75cf3170a78c-kube-api-access-fbrs9\") pod \"dnsmasq-dns-6b7b667979-cv8dw\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: E1201 18:54:26.333273 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 01 18:54:26 crc kubenswrapper[4935]: E1201 18:54:26.333445 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lpkrf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-c8448_openstack(b8f827a2-a529-4371-8c82-c06377b2c9f2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:54:26 crc kubenswrapper[4935]: E1201 18:54:26.336829 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-c8448" podUID="b8f827a2-a529-4371-8c82-c06377b2c9f2" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.357838 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.366448 4935 scope.go:117] "RemoveContainer" containerID="83213db29832ab816fb4e977406624c01233ff40ba7fec2539457866fe31b07e" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.807456 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.809930 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.812730 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.813983 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.817413 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-kv6rd" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.832940 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.874507 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvm9d\" (UniqueName: \"kubernetes.io/projected/5089e065-96a2-4154-ab96-4ad8a2a0aa56-kube-api-access-vvm9d\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.874555 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-config-data\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.874589 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.874631 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-logs\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.874649 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.874688 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-scripts\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.874742 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " 
pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: W1201 18:54:26.913455 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56e181ed_d603_4a53_aa57_222d888ce5a2.slice/crio-69e637c148afc0c09a771ff7bd928c42f0c80724f44deb2f82f94844c13abfed WatchSource:0}: Error finding container 69e637c148afc0c09a771ff7bd928c42f0c80724f44deb2f82f94844c13abfed: Status 404 returned error can't find the container with id 69e637c148afc0c09a771ff7bd928c42f0c80724f44deb2f82f94844c13abfed Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.930495 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bb44q"] Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.981206 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvm9d\" (UniqueName: \"kubernetes.io/projected/5089e065-96a2-4154-ab96-4ad8a2a0aa56-kube-api-access-vvm9d\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.981256 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-config-data\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.981290 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.981334 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-logs\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.981355 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.981390 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-scripts\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.981441 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.981828 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.981909 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-logs\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.982076 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.993430 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.993590 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-scripts\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:26 crc kubenswrapper[4935]: I1201 18:54:26.995798 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-config-data\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.004646 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvm9d\" (UniqueName: \"kubernetes.io/projected/5089e065-96a2-4154-ab96-4ad8a2a0aa56-kube-api-access-vvm9d\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.028644 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbbbd"] Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.035472 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.108012 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.110424 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.113362 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.116329 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.134232 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.169486 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bwph6" event={"ID":"ae58d3fe-1a16-467c-b5c9-9522cb473a03","Type":"ContainerStarted","Data":"03a1a6c961272e9ce1ff5b99476ddd33cad510815560c954160a1e690e78533b"} Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.185924 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.185991 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfzx6\" (UniqueName: \"kubernetes.io/projected/cbe1983d-dc1b-4350-98e3-b7db576086ae-kube-api-access-zfzx6\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.186029 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.186052 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-logs\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.186177 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.186228 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.186277 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.203390 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerStarted","Data":"4629a371e99213e2533baf55c30b880375731480aa38b623530d9a06141e7080"} Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.205413 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bb44q" event={"ID":"56e181ed-d603-4a53-aa57-222d888ce5a2","Type":"ContainerStarted","Data":"69e637c148afc0c09a771ff7bd928c42f0c80724f44deb2f82f94844c13abfed"} Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.209532 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbbbd" event={"ID":"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f","Type":"ContainerStarted","Data":"f8cc284f31f03d12dae19eff3bc93bd7b183e455df4e397d39c2c5add5163776"} Dec 01 18:54:27 crc kubenswrapper[4935]: E1201 18:54:27.211154 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-c8448" podUID="b8f827a2-a529-4371-8c82-c06377b2c9f2" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.215408 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-bwph6" podStartSLOduration=3.4924094439999998 podStartE2EDuration="37.215381271s" podCreationTimestamp="2025-12-01 18:53:50 +0000 UTC" firstStartedPulling="2025-12-01 18:53:52.59225 +0000 UTC m=+1446.613879259" lastFinishedPulling="2025-12-01 18:54:26.315221827 +0000 UTC m=+1480.336851086" observedRunningTime="2025-12-01 18:54:27.186716502 +0000 UTC m=+1481.208345831" watchObservedRunningTime="2025-12-01 18:54:27.215381271 +0000 UTC m=+1481.237010530" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.265431 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-cjlh4" podUID="5f14ba90-8960-4a1c-b3fb-f740cb4a6650" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: i/o timeout" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.288994 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.289062 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.289116 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " 
pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.289266 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.289293 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfzx6\" (UniqueName: \"kubernetes.io/projected/cbe1983d-dc1b-4350-98e3-b7db576086ae-kube-api-access-zfzx6\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.289312 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.289329 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-logs\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.290481 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.290630 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.292933 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-logs\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.296089 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.298137 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.301361 4935 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.313576 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfzx6\" (UniqueName: \"kubernetes.io/projected/cbe1983d-dc1b-4350-98e3-b7db576086ae-kube-api-access-zfzx6\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.358420 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.431959 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.440585 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-cv8dw"] Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.475374 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79cd4f6685-mvbmf"] Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.539556 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f87769df8-nbb75"] Dec 01 18:54:27 crc kubenswrapper[4935]: W1201 18:54:27.589087 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1728f1b_5640_4c6d_ba3c_c8096d0407c4.slice/crio-d124ba1cc6f694042eda249a1f7ee31e1254225bb4fbb353ba43854f7b34f901 WatchSource:0}: Error finding container d124ba1cc6f694042eda249a1f7ee31e1254225bb4fbb353ba43854f7b34f901: Status 404 returned error can't find the container with id d124ba1cc6f694042eda249a1f7ee31e1254225bb4fbb353ba43854f7b34f901 Dec 01 18:54:27 crc kubenswrapper[4935]: I1201 18:54:27.794966 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.175038 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.343116 4935 generic.go:334] "Generic (PLEG): container finished" podID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerID="9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5" exitCode=0 Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.343514 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbbbd" event={"ID":"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f","Type":"ContainerDied","Data":"9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5"} Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.348887 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" event={"ID":"9c07307f-5812-4ba6-bf01-56e22121c34d","Type":"ContainerStarted","Data":"af76f5db6f828ceacc29911590cb0b36cef07b1576aa681753c051643509c6e9"} Dec 01 18:54:28 crc kubenswrapper[4935]: 
I1201 18:54:28.352741 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5089e065-96a2-4154-ab96-4ad8a2a0aa56","Type":"ContainerStarted","Data":"e3fc9f955f3a84f56df328f2dfbacc68a802bf2916456fdcf154071838d014d3"} Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.365447 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f87769df8-nbb75" event={"ID":"c1728f1b-5640-4c6d-ba3c-c8096d0407c4","Type":"ContainerStarted","Data":"aaec48903ccecffa0cded70c65e635a3cdf3d78a110bd64dc94c7f47f41b437c"} Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.365511 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f87769df8-nbb75" event={"ID":"c1728f1b-5640-4c6d-ba3c-c8096d0407c4","Type":"ContainerStarted","Data":"d124ba1cc6f694042eda249a1f7ee31e1254225bb4fbb353ba43854f7b34f901"} Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.381864 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bb44q" event={"ID":"56e181ed-d603-4a53-aa57-222d888ce5a2","Type":"ContainerStarted","Data":"6fe0e44ed0de290991d50c077f3a0f53acd6c7fa8f04a427206ac15bc2a4307a"} Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.387909 4935 generic.go:334] "Generic (PLEG): container finished" podID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" containerID="9a1b3a34a37f8006ce66a51e1e0660b6e6e87e48f0b31f9ab5a627fe132b3cb9" exitCode=0 Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.388058 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" event={"ID":"b830d4fa-99f6-4f0b-9220-75cf3170a78c","Type":"ContainerDied","Data":"9a1b3a34a37f8006ce66a51e1e0660b6e6e87e48f0b31f9ab5a627fe132b3cb9"} Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.388118 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" event={"ID":"b830d4fa-99f6-4f0b-9220-75cf3170a78c","Type":"ContainerStarted","Data":"3b5dd279d772d8c8dd66877f11da4e9b264f1a5dd07f9641288782f4116ceeb7"} Dec 01 18:54:28 crc kubenswrapper[4935]: I1201 18:54:28.423646 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-bb44q" podStartSLOduration=17.423629174 podStartE2EDuration="17.423629174s" podCreationTimestamp="2025-12-01 18:54:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:28.420400563 +0000 UTC m=+1482.442029822" watchObservedRunningTime="2025-12-01 18:54:28.423629174 +0000 UTC m=+1482.445258433" Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.400236 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5089e065-96a2-4154-ab96-4ad8a2a0aa56","Type":"ContainerStarted","Data":"5b8d2b6668e3630a42c06ea8253ffb829aa9ddf073068173bb9f268f17fe0240"} Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.403706 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f87769df8-nbb75" event={"ID":"c1728f1b-5640-4c6d-ba3c-c8096d0407c4","Type":"ContainerStarted","Data":"67d92002666cdf3baf0db8af5e52b65f139da2f7fb047074d000b28292c5bb78"} Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.405347 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.420292 4935 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" event={"ID":"b830d4fa-99f6-4f0b-9220-75cf3170a78c","Type":"ContainerStarted","Data":"1091ecf6b8016ea3f882be3ebea831e46a5edbe036c7bd3c5a35435947a40379"} Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.420474 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.431073 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5f87769df8-nbb75" podStartSLOduration=4.431056852 podStartE2EDuration="4.431056852s" podCreationTimestamp="2025-12-01 18:54:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:29.42325068 +0000 UTC m=+1483.444879939" watchObservedRunningTime="2025-12-01 18:54:29.431056852 +0000 UTC m=+1483.452686111" Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.431624 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cbe1983d-dc1b-4350-98e3-b7db576086ae","Type":"ContainerStarted","Data":"8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68"} Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.431685 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cbe1983d-dc1b-4350-98e3-b7db576086ae","Type":"ContainerStarted","Data":"69ec840f5e448c96928426eba9b95f2ca694c8e4f98b49966c9956e9444aadd6"} Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.435631 4935 generic.go:334] "Generic (PLEG): container finished" podID="9c07307f-5812-4ba6-bf01-56e22121c34d" containerID="96e53fb83c03fe1a2d1a75d0f45c16c915e0893761a4d36b2fa7571fd0b199f9" exitCode=0 Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.435916 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" event={"ID":"9c07307f-5812-4ba6-bf01-56e22121c34d","Type":"ContainerDied","Data":"96e53fb83c03fe1a2d1a75d0f45c16c915e0893761a4d36b2fa7571fd0b199f9"} Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.447087 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" podStartSLOduration=4.447064859 podStartE2EDuration="4.447064859s" podCreationTimestamp="2025-12-01 18:54:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:29.444054265 +0000 UTC m=+1483.465683524" watchObservedRunningTime="2025-12-01 18:54:29.447064859 +0000 UTC m=+1483.468694118" Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.684389 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.803078 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:54:29 crc kubenswrapper[4935]: I1201 18:54:29.976673 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.092776 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-svc\") pod \"9c07307f-5812-4ba6-bf01-56e22121c34d\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.092862 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-nb\") pod \"9c07307f-5812-4ba6-bf01-56e22121c34d\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.093038 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-config\") pod \"9c07307f-5812-4ba6-bf01-56e22121c34d\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.093128 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cfl9\" (UniqueName: \"kubernetes.io/projected/9c07307f-5812-4ba6-bf01-56e22121c34d-kube-api-access-6cfl9\") pod \"9c07307f-5812-4ba6-bf01-56e22121c34d\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.093160 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-sb\") pod \"9c07307f-5812-4ba6-bf01-56e22121c34d\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.093191 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-swift-storage-0\") pod \"9c07307f-5812-4ba6-bf01-56e22121c34d\" (UID: \"9c07307f-5812-4ba6-bf01-56e22121c34d\") " Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.107423 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c07307f-5812-4ba6-bf01-56e22121c34d-kube-api-access-6cfl9" (OuterVolumeSpecName: "kube-api-access-6cfl9") pod "9c07307f-5812-4ba6-bf01-56e22121c34d" (UID: "9c07307f-5812-4ba6-bf01-56e22121c34d"). InnerVolumeSpecName "kube-api-access-6cfl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.133845 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9c07307f-5812-4ba6-bf01-56e22121c34d" (UID: "9c07307f-5812-4ba6-bf01-56e22121c34d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.149560 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9c07307f-5812-4ba6-bf01-56e22121c34d" (UID: "9c07307f-5812-4ba6-bf01-56e22121c34d"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.158215 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9c07307f-5812-4ba6-bf01-56e22121c34d" (UID: "9c07307f-5812-4ba6-bf01-56e22121c34d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.161662 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-config" (OuterVolumeSpecName: "config") pod "9c07307f-5812-4ba6-bf01-56e22121c34d" (UID: "9c07307f-5812-4ba6-bf01-56e22121c34d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.162450 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9c07307f-5812-4ba6-bf01-56e22121c34d" (UID: "9c07307f-5812-4ba6-bf01-56e22121c34d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.195723 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.195756 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.195766 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.195776 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cfl9\" (UniqueName: \"kubernetes.io/projected/9c07307f-5812-4ba6-bf01-56e22121c34d-kube-api-access-6cfl9\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.195786 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.195795 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c07307f-5812-4ba6-bf01-56e22121c34d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.449752 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerName="glance-log" containerID="cri-o://5b8d2b6668e3630a42c06ea8253ffb829aa9ddf073068173bb9f268f17fe0240" gracePeriod=30 Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.449806 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" 
podUID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerName="glance-httpd" containerID="cri-o://d5b60a3f24877c12432df58f3eba7b3252c623908680f6cd73aa7abb1893dad6" gracePeriod=30 Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.454458 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerStarted","Data":"b14c34b1b0dda050ae97b1323f3ad1f76fce0fc3439cef9b136e325e3819408b"} Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.471249 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbbbd" event={"ID":"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f","Type":"ContainerStarted","Data":"072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a"} Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.493692 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cbe1983d-dc1b-4350-98e3-b7db576086ae","Type":"ContainerStarted","Data":"3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db"} Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.493877 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerName="glance-log" containerID="cri-o://8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68" gracePeriod=30 Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.494028 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerName="glance-httpd" containerID="cri-o://3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db" gracePeriod=30 Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.510409 4935 generic.go:334] "Generic (PLEG): container finished" podID="ae58d3fe-1a16-467c-b5c9-9522cb473a03" containerID="03a1a6c961272e9ce1ff5b99476ddd33cad510815560c954160a1e690e78533b" exitCode=0 Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.514337 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.5143182809999995 podStartE2EDuration="5.514318281s" podCreationTimestamp="2025-12-01 18:54:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:30.477590832 +0000 UTC m=+1484.499220091" watchObservedRunningTime="2025-12-01 18:54:30.514318281 +0000 UTC m=+1484.535947540" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.533299 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.545306 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bwph6" event={"ID":"ae58d3fe-1a16-467c-b5c9-9522cb473a03","Type":"ContainerDied","Data":"03a1a6c961272e9ce1ff5b99476ddd33cad510815560c954160a1e690e78533b"} Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.545355 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79cd4f6685-mvbmf" event={"ID":"9c07307f-5812-4ba6-bf01-56e22121c34d","Type":"ContainerDied","Data":"af76f5db6f828ceacc29911590cb0b36cef07b1576aa681753c051643509c6e9"} Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.545394 4935 scope.go:117] "RemoveContainer" containerID="96e53fb83c03fe1a2d1a75d0f45c16c915e0893761a4d36b2fa7571fd0b199f9" Dec 01 18:54:30 crc kubenswrapper[4935]: I1201 18:54:30.571730 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.5717025 podStartE2EDuration="4.5717025s" podCreationTimestamp="2025-12-01 18:54:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:30.517021145 +0000 UTC m=+1484.538650404" watchObservedRunningTime="2025-12-01 18:54:30.5717025 +0000 UTC m=+1484.593331759" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:30.944702 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79cd4f6685-mvbmf"] Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:30.953953 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79cd4f6685-mvbmf"] Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.222192 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.225457 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-scripts\") pod \"cbe1983d-dc1b-4350-98e3-b7db576086ae\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.225549 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfzx6\" (UniqueName: \"kubernetes.io/projected/cbe1983d-dc1b-4350-98e3-b7db576086ae-kube-api-access-zfzx6\") pod \"cbe1983d-dc1b-4350-98e3-b7db576086ae\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.225606 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-logs\") pod \"cbe1983d-dc1b-4350-98e3-b7db576086ae\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.225807 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-combined-ca-bundle\") pod \"cbe1983d-dc1b-4350-98e3-b7db576086ae\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.225906 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-httpd-run\") pod \"cbe1983d-dc1b-4350-98e3-b7db576086ae\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.225922 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-logs" (OuterVolumeSpecName: "logs") pod "cbe1983d-dc1b-4350-98e3-b7db576086ae" (UID: "cbe1983d-dc1b-4350-98e3-b7db576086ae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.226116 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cbe1983d-dc1b-4350-98e3-b7db576086ae\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.226140 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-config-data\") pod \"cbe1983d-dc1b-4350-98e3-b7db576086ae\" (UID: \"cbe1983d-dc1b-4350-98e3-b7db576086ae\") " Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.226180 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cbe1983d-dc1b-4350-98e3-b7db576086ae" (UID: "cbe1983d-dc1b-4350-98e3-b7db576086ae"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.226745 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.226762 4935 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cbe1983d-dc1b-4350-98e3-b7db576086ae-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.232305 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "cbe1983d-dc1b-4350-98e3-b7db576086ae" (UID: "cbe1983d-dc1b-4350-98e3-b7db576086ae"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.232916 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbe1983d-dc1b-4350-98e3-b7db576086ae-kube-api-access-zfzx6" (OuterVolumeSpecName: "kube-api-access-zfzx6") pod "cbe1983d-dc1b-4350-98e3-b7db576086ae" (UID: "cbe1983d-dc1b-4350-98e3-b7db576086ae"). InnerVolumeSpecName "kube-api-access-zfzx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.233020 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-scripts" (OuterVolumeSpecName: "scripts") pod "cbe1983d-dc1b-4350-98e3-b7db576086ae" (UID: "cbe1983d-dc1b-4350-98e3-b7db576086ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.270402 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cbe1983d-dc1b-4350-98e3-b7db576086ae" (UID: "cbe1983d-dc1b-4350-98e3-b7db576086ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.294632 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-config-data" (OuterVolumeSpecName: "config-data") pod "cbe1983d-dc1b-4350-98e3-b7db576086ae" (UID: "cbe1983d-dc1b-4350-98e3-b7db576086ae"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.328432 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.328488 4935 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.328498 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.328507 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe1983d-dc1b-4350-98e3-b7db576086ae-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.328516 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfzx6\" (UniqueName: \"kubernetes.io/projected/cbe1983d-dc1b-4350-98e3-b7db576086ae-kube-api-access-zfzx6\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.353841 4935 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.431363 4935 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.559167 4935 generic.go:334] "Generic (PLEG): container finished" podID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerID="072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a" exitCode=0 Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.559247 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbbbd" event={"ID":"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f","Type":"ContainerDied","Data":"072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a"} Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.563338 4935 generic.go:334] "Generic (PLEG): container finished" podID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerID="3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db" exitCode=143 Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.563397 4935 generic.go:334] "Generic (PLEG): container finished" podID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerID="8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68" exitCode=143 Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.563483 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cbe1983d-dc1b-4350-98e3-b7db576086ae","Type":"ContainerDied","Data":"3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db"} Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.563519 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"cbe1983d-dc1b-4350-98e3-b7db576086ae","Type":"ContainerDied","Data":"8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68"} Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.563535 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cbe1983d-dc1b-4350-98e3-b7db576086ae","Type":"ContainerDied","Data":"69ec840f5e448c96928426eba9b95f2ca694c8e4f98b49966c9956e9444aadd6"} Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.563572 4935 scope.go:117] "RemoveContainer" containerID="3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.563751 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.582887 4935 generic.go:334] "Generic (PLEG): container finished" podID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerID="d5b60a3f24877c12432df58f3eba7b3252c623908680f6cd73aa7abb1893dad6" exitCode=143 Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.582917 4935 generic.go:334] "Generic (PLEG): container finished" podID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerID="5b8d2b6668e3630a42c06ea8253ffb829aa9ddf073068173bb9f268f17fe0240" exitCode=143 Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.583112 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5089e065-96a2-4154-ab96-4ad8a2a0aa56","Type":"ContainerDied","Data":"d5b60a3f24877c12432df58f3eba7b3252c623908680f6cd73aa7abb1893dad6"} Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.583165 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5089e065-96a2-4154-ab96-4ad8a2a0aa56","Type":"ContainerDied","Data":"5b8d2b6668e3630a42c06ea8253ffb829aa9ddf073068173bb9f268f17fe0240"} Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.617998 4935 scope.go:117] "RemoveContainer" containerID="8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.620445 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.636983 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.643034 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:54:31 crc kubenswrapper[4935]: E1201 18:54:31.643497 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerName="glance-httpd" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.643509 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerName="glance-httpd" Dec 01 18:54:31 crc kubenswrapper[4935]: E1201 18:54:31.643527 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerName="glance-log" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.643534 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerName="glance-log" Dec 01 18:54:31 crc kubenswrapper[4935]: E1201 18:54:31.643554 4935 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9c07307f-5812-4ba6-bf01-56e22121c34d" containerName="init" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.643562 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c07307f-5812-4ba6-bf01-56e22121c34d" containerName="init" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.643752 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerName="glance-log" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.643765 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbe1983d-dc1b-4350-98e3-b7db576086ae" containerName="glance-httpd" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.643777 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c07307f-5812-4ba6-bf01-56e22121c34d" containerName="init" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.644961 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.647317 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.647531 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.655773 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.667830 4935 scope.go:117] "RemoveContainer" containerID="3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db" Dec 01 18:54:31 crc kubenswrapper[4935]: E1201 18:54:31.677758 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db\": container with ID starting with 3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db not found: ID does not exist" containerID="3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.677797 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db"} err="failed to get container status \"3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db\": rpc error: code = NotFound desc = could not find container \"3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db\": container with ID starting with 3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db not found: ID does not exist" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.677822 4935 scope.go:117] "RemoveContainer" containerID="8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68" Dec 01 18:54:31 crc kubenswrapper[4935]: E1201 18:54:31.678521 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68\": container with ID starting with 8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68 not found: ID does not exist" containerID="8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.678561 4935 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68"} err="failed to get container status \"8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68\": rpc error: code = NotFound desc = could not find container \"8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68\": container with ID starting with 8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68 not found: ID does not exist" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.678590 4935 scope.go:117] "RemoveContainer" containerID="3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.682620 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db"} err="failed to get container status \"3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db\": rpc error: code = NotFound desc = could not find container \"3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db\": container with ID starting with 3ea824bfac4d7a7f0972a0628df0e73b4216a4459c6f29fae1003ccc070e01db not found: ID does not exist" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.682659 4935 scope.go:117] "RemoveContainer" containerID="8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.685775 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68"} err="failed to get container status \"8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68\": rpc error: code = NotFound desc = could not find container \"8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68\": container with ID starting with 8e1ceac6c7aa40db05f20033eb0cfd6d819057ed8e495f19644227aa9508ee68 not found: ID does not exist" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.724880 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6f7cd766b5-766jz"] Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.727132 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.732655 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.732972 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.749756 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f7cd766b5-766jz"] Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839164 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-config\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839225 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfs4g\" (UniqueName: \"kubernetes.io/projected/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-kube-api-access-hfs4g\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839284 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839301 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-logs\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839317 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-internal-tls-certs\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839348 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2svm\" (UniqueName: \"kubernetes.io/projected/d586b231-e06e-4111-8f29-0b8d7c12eccc-kube-api-access-l2svm\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839368 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-ovndb-tls-certs\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839388 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-public-tls-certs\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839420 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-httpd-config\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839437 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839467 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839489 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839725 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-combined-ca-bundle\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839790 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.839831 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949006 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949085 4935 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-combined-ca-bundle\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949126 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949166 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949187 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-config\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949220 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfs4g\" (UniqueName: \"kubernetes.io/projected/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-kube-api-access-hfs4g\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949269 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949283 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-logs\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949299 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-internal-tls-certs\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949330 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2svm\" (UniqueName: \"kubernetes.io/projected/d586b231-e06e-4111-8f29-0b8d7c12eccc-kube-api-access-l2svm\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949346 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-ovndb-tls-certs\") pod 
\"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949364 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-public-tls-certs\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949394 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-httpd-config\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949410 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.949437 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.951041 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-logs\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.951602 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.951832 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.958584 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.961791 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " 
pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.962925 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-config\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.964383 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-combined-ca-bundle\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.966954 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.987918 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-public-tls-certs\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.988022 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.988133 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-internal-tls-certs\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.992729 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2svm\" (UniqueName: \"kubernetes.io/projected/d586b231-e06e-4111-8f29-0b8d7c12eccc-kube-api-access-l2svm\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.992800 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfs4g\" (UniqueName: \"kubernetes.io/projected/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-kube-api-access-hfs4g\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:31 crc kubenswrapper[4935]: I1201 18:54:31.993045 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-httpd-config\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.008756 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2e120ac-43cf-4de1-ba58-b0418d6ba9dd-ovndb-tls-certs\") pod \"neutron-6f7cd766b5-766jz\" (UID: \"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd\") " pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.013286 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.046243 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.278868 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.412549 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bwph6" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.432657 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469027 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-combined-ca-bundle\") pod \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469075 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvm9d\" (UniqueName: \"kubernetes.io/projected/5089e065-96a2-4154-ab96-4ad8a2a0aa56-kube-api-access-vvm9d\") pod \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469100 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae58d3fe-1a16-467c-b5c9-9522cb473a03-logs\") pod \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469131 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-config-data\") pod \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469334 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-logs\") pod \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469430 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9xb4\" (UniqueName: \"kubernetes.io/projected/ae58d3fe-1a16-467c-b5c9-9522cb473a03-kube-api-access-m9xb4\") pod \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469453 4935 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-scripts\") pod \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469469 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-httpd-run\") pod \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469550 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-combined-ca-bundle\") pod \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469588 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469604 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-scripts\") pod \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\" (UID: \"5089e065-96a2-4154-ab96-4ad8a2a0aa56\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.469627 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-config-data\") pod \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\" (UID: \"ae58d3fe-1a16-467c-b5c9-9522cb473a03\") " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.470213 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae58d3fe-1a16-467c-b5c9-9522cb473a03-logs" (OuterVolumeSpecName: "logs") pod "ae58d3fe-1a16-467c-b5c9-9522cb473a03" (UID: "ae58d3fe-1a16-467c-b5c9-9522cb473a03"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.472524 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-logs" (OuterVolumeSpecName: "logs") pod "5089e065-96a2-4154-ab96-4ad8a2a0aa56" (UID: "5089e065-96a2-4154-ab96-4ad8a2a0aa56"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.473437 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5089e065-96a2-4154-ab96-4ad8a2a0aa56" (UID: "5089e065-96a2-4154-ab96-4ad8a2a0aa56"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.482129 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5089e065-96a2-4154-ab96-4ad8a2a0aa56-kube-api-access-vvm9d" (OuterVolumeSpecName: "kube-api-access-vvm9d") pod "5089e065-96a2-4154-ab96-4ad8a2a0aa56" (UID: "5089e065-96a2-4154-ab96-4ad8a2a0aa56"). InnerVolumeSpecName "kube-api-access-vvm9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.489347 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae58d3fe-1a16-467c-b5c9-9522cb473a03-kube-api-access-m9xb4" (OuterVolumeSpecName: "kube-api-access-m9xb4") pod "ae58d3fe-1a16-467c-b5c9-9522cb473a03" (UID: "ae58d3fe-1a16-467c-b5c9-9522cb473a03"). InnerVolumeSpecName "kube-api-access-m9xb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.499365 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "5089e065-96a2-4154-ab96-4ad8a2a0aa56" (UID: "5089e065-96a2-4154-ab96-4ad8a2a0aa56"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.499869 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-scripts" (OuterVolumeSpecName: "scripts") pod "5089e065-96a2-4154-ab96-4ad8a2a0aa56" (UID: "5089e065-96a2-4154-ab96-4ad8a2a0aa56"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.528305 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-scripts" (OuterVolumeSpecName: "scripts") pod "ae58d3fe-1a16-467c-b5c9-9522cb473a03" (UID: "ae58d3fe-1a16-467c-b5c9-9522cb473a03"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.576537 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae58d3fe-1a16-467c-b5c9-9522cb473a03" (UID: "ae58d3fe-1a16-467c-b5c9-9522cb473a03"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.582303 4935 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.583734 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.583747 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.583772 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvm9d\" (UniqueName: \"kubernetes.io/projected/5089e065-96a2-4154-ab96-4ad8a2a0aa56-kube-api-access-vvm9d\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.583783 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae58d3fe-1a16-467c-b5c9-9522cb473a03-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.583792 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.583806 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9xb4\" (UniqueName: \"kubernetes.io/projected/ae58d3fe-1a16-467c-b5c9-9522cb473a03-kube-api-access-m9xb4\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.583816 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.583825 4935 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5089e065-96a2-4154-ab96-4ad8a2a0aa56-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.615377 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-config-data" (OuterVolumeSpecName: "config-data") pod "5089e065-96a2-4154-ab96-4ad8a2a0aa56" (UID: "5089e065-96a2-4154-ab96-4ad8a2a0aa56"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.618244 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-config-data" (OuterVolumeSpecName: "config-data") pod "ae58d3fe-1a16-467c-b5c9-9522cb473a03" (UID: "ae58d3fe-1a16-467c-b5c9-9522cb473a03"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.631690 4935 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.658951 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c07307f-5812-4ba6-bf01-56e22121c34d" path="/var/lib/kubelet/pods/9c07307f-5812-4ba6-bf01-56e22121c34d/volumes" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.660850 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbe1983d-dc1b-4350-98e3-b7db576086ae" path="/var/lib/kubelet/pods/cbe1983d-dc1b-4350-98e3-b7db576086ae/volumes" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.692012 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.692052 4935 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.692066 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae58d3fe-1a16-467c-b5c9-9522cb473a03-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.708315 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bwph6" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.709258 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bwph6" event={"ID":"ae58d3fe-1a16-467c-b5c9-9522cb473a03","Type":"ContainerDied","Data":"262e1a3fa876da77966e5b8a8ff2e47df03bd612154889550ca3787d143a3598"} Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.709302 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="262e1a3fa876da77966e5b8a8ff2e47df03bd612154889550ca3787d143a3598" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.721231 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5089e065-96a2-4154-ab96-4ad8a2a0aa56","Type":"ContainerDied","Data":"e3fc9f955f3a84f56df328f2dfbacc68a802bf2916456fdcf154071838d014d3"} Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.721274 4935 scope.go:117] "RemoveContainer" containerID="d5b60a3f24877c12432df58f3eba7b3252c623908680f6cd73aa7abb1893dad6" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.721414 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.722245 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5089e065-96a2-4154-ab96-4ad8a2a0aa56" (UID: "5089e065-96a2-4154-ab96-4ad8a2a0aa56"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.775857 4935 scope.go:117] "RemoveContainer" containerID="5b8d2b6668e3630a42c06ea8253ffb829aa9ddf073068173bb9f268f17fe0240" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.799723 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089e065-96a2-4154-ab96-4ad8a2a0aa56-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.809805 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.860952 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.880480 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:54:32 crc kubenswrapper[4935]: E1201 18:54:32.881117 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae58d3fe-1a16-467c-b5c9-9522cb473a03" containerName="placement-db-sync" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.881156 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae58d3fe-1a16-467c-b5c9-9522cb473a03" containerName="placement-db-sync" Dec 01 18:54:32 crc kubenswrapper[4935]: E1201 18:54:32.881219 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerName="glance-httpd" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.881234 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerName="glance-httpd" Dec 01 18:54:32 crc kubenswrapper[4935]: E1201 18:54:32.881259 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerName="glance-log" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.881268 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerName="glance-log" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.881522 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerName="glance-httpd" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.881560 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae58d3fe-1a16-467c-b5c9-9522cb473a03" containerName="placement-db-sync" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.881573 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" containerName="glance-log" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.883058 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.899641 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.899819 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.919192 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:54:32 crc kubenswrapper[4935]: I1201 18:54:32.934421 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f7cd766b5-766jz"] Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.007130 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-config-data\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.007372 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-logs\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.007421 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-scripts\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.007468 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.007506 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh474\" (UniqueName: \"kubernetes.io/projected/e9727d7a-7291-4a7a-9398-dddf85dd8d38-kube-api-access-bh474\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.007534 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.007559 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " 
pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.007599 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.007899 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5d64f494d8-2clmq"] Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.009772 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.016752 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rc6l5" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.016880 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.017018 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.017099 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.017648 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.046282 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5d64f494d8-2clmq"] Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110063 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-scripts\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110122 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/954e6aa6-2067-4489-83e8-390033553c3e-logs\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110172 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110213 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh474\" (UniqueName: \"kubernetes.io/projected/e9727d7a-7291-4a7a-9398-dddf85dd8d38-kube-api-access-bh474\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110236 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-scripts\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110254 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110276 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-public-tls-certs\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110304 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110323 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-combined-ca-bundle\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110348 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-config-data\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110381 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110420 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k48mw\" (UniqueName: \"kubernetes.io/projected/954e6aa6-2067-4489-83e8-390033553c3e-kube-api-access-k48mw\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110456 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-config-data\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110472 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-internal-tls-certs\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110495 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-logs\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.110915 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-logs\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.112618 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.115593 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.116085 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-scripts\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.144390 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-config-data\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.144843 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.154081 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.157969 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh474\" (UniqueName: \"kubernetes.io/projected/e9727d7a-7291-4a7a-9398-dddf85dd8d38-kube-api-access-bh474\") pod 
\"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.161335 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.212716 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-scripts\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.212771 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-public-tls-certs\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.212808 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-combined-ca-bundle\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.212842 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-config-data\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.212910 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k48mw\" (UniqueName: \"kubernetes.io/projected/954e6aa6-2067-4489-83e8-390033553c3e-kube-api-access-k48mw\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.212958 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-internal-tls-certs\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.213040 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/954e6aa6-2067-4489-83e8-390033553c3e-logs\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.213592 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/954e6aa6-2067-4489-83e8-390033553c3e-logs\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc 
kubenswrapper[4935]: I1201 18:54:33.217037 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-internal-tls-certs\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.217436 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-scripts\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.219863 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-combined-ca-bundle\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.220265 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-config-data\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.220942 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/954e6aa6-2067-4489-83e8-390033553c3e-public-tls-certs\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.223505 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.227924 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k48mw\" (UniqueName: \"kubernetes.io/projected/954e6aa6-2067-4489-83e8-390033553c3e-kube-api-access-k48mw\") pod \"placement-5d64f494d8-2clmq\" (UID: \"954e6aa6-2067-4489-83e8-390033553c3e\") " pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: W1201 18:54:33.243734 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd586b231_e06e_4111_8f29_0b8d7c12eccc.slice/crio-519bd4cbf19868850c86bf554e1d622bfd33819495879fb217e04c3296458509 WatchSource:0}: Error finding container 519bd4cbf19868850c86bf554e1d622bfd33819495879fb217e04c3296458509: Status 404 returned error can't find the container with id 519bd4cbf19868850c86bf554e1d622bfd33819495879fb217e04c3296458509 Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.256958 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.382720 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.741986 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d586b231-e06e-4111-8f29-0b8d7c12eccc","Type":"ContainerStarted","Data":"519bd4cbf19868850c86bf554e1d622bfd33819495879fb217e04c3296458509"} Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.744935 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7cd766b5-766jz" event={"ID":"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd","Type":"ContainerStarted","Data":"f986715db753c1178b1971507e60f53994ba9d560d73d7154c6339ce0bb631ea"} Dec 01 18:54:33 crc kubenswrapper[4935]: I1201 18:54:33.891606 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5d64f494d8-2clmq"] Dec 01 18:54:33 crc kubenswrapper[4935]: W1201 18:54:33.894873 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod954e6aa6_2067_4489_83e8_390033553c3e.slice/crio-c75c62059f95ef37e20390b8147c888d789c581cbcac24b424d68765fd6ff938 WatchSource:0}: Error finding container c75c62059f95ef37e20390b8147c888d789c581cbcac24b424d68765fd6ff938: Status 404 returned error can't find the container with id c75c62059f95ef37e20390b8147c888d789c581cbcac24b424d68765fd6ff938 Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.228689 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.533671 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5089e065-96a2-4154-ab96-4ad8a2a0aa56" path="/var/lib/kubelet/pods/5089e065-96a2-4154-ab96-4ad8a2a0aa56/volumes" Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.759370 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5d64f494d8-2clmq" event={"ID":"954e6aa6-2067-4489-83e8-390033553c3e","Type":"ContainerStarted","Data":"d9300e38dfdcd21cca2d305a43f88d6c544cf03c8c7c76978d4891a670fcccab"} Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.759415 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5d64f494d8-2clmq" event={"ID":"954e6aa6-2067-4489-83e8-390033553c3e","Type":"ContainerStarted","Data":"c75c62059f95ef37e20390b8147c888d789c581cbcac24b424d68765fd6ff938"} Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.762307 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbbbd" event={"ID":"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f","Type":"ContainerStarted","Data":"aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c"} Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.771285 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7cd766b5-766jz" event={"ID":"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd","Type":"ContainerStarted","Data":"37d73c1bed05cc97da15b3cd4f09f24f81c2838e6956c984dd2c5fa254d58c46"} Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.771320 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7cd766b5-766jz" event={"ID":"e2e120ac-43cf-4de1-ba58-b0418d6ba9dd","Type":"ContainerStarted","Data":"e3399235483c293cba7db7906440269e892e0d06c1d2042ab7b5d1cd90fa3b30"} Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.771471 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6f7cd766b5-766jz" 
Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.772495 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9727d7a-7291-4a7a-9398-dddf85dd8d38","Type":"ContainerStarted","Data":"3612ff548c1df4b50c4e6ba913b04d9616163d65cd15f62ca1e44a91b33d5e65"} Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.776782 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d586b231-e06e-4111-8f29-0b8d7c12eccc","Type":"ContainerStarted","Data":"da3dd5309546b5603d19f4c86102b686f99bf6418473290102ee266321691dde"} Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.786524 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nbbbd" podStartSLOduration=20.977098031 podStartE2EDuration="26.7865043s" podCreationTimestamp="2025-12-01 18:54:08 +0000 UTC" firstStartedPulling="2025-12-01 18:54:28.377513835 +0000 UTC m=+1482.399143094" lastFinishedPulling="2025-12-01 18:54:34.186920094 +0000 UTC m=+1488.208549363" observedRunningTime="2025-12-01 18:54:34.779097481 +0000 UTC m=+1488.800726740" watchObservedRunningTime="2025-12-01 18:54:34.7865043 +0000 UTC m=+1488.808133559" Dec 01 18:54:34 crc kubenswrapper[4935]: I1201 18:54:34.802462 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6f7cd766b5-766jz" podStartSLOduration=3.802445605 podStartE2EDuration="3.802445605s" podCreationTimestamp="2025-12-01 18:54:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:34.799708461 +0000 UTC m=+1488.821337720" watchObservedRunningTime="2025-12-01 18:54:34.802445605 +0000 UTC m=+1488.824074864" Dec 01 18:54:35 crc kubenswrapper[4935]: I1201 18:54:35.797912 4935 generic.go:334] "Generic (PLEG): container finished" podID="56e181ed-d603-4a53-aa57-222d888ce5a2" containerID="6fe0e44ed0de290991d50c077f3a0f53acd6c7fa8f04a427206ac15bc2a4307a" exitCode=0 Dec 01 18:54:35 crc kubenswrapper[4935]: I1201 18:54:35.799411 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bb44q" event={"ID":"56e181ed-d603-4a53-aa57-222d888ce5a2","Type":"ContainerDied","Data":"6fe0e44ed0de290991d50c077f3a0f53acd6c7fa8f04a427206ac15bc2a4307a"} Dec 01 18:54:36 crc kubenswrapper[4935]: I1201 18:54:36.360411 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:54:36 crc kubenswrapper[4935]: I1201 18:54:36.435568 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-xdvvz"] Dec 01 18:54:36 crc kubenswrapper[4935]: I1201 18:54:36.435851 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" podUID="939e12e9-7833-49fe-93f7-8ea93afac15f" containerName="dnsmasq-dns" containerID="cri-o://232e05af677a9738c1d83fb6168636f74bf6c21877e70bb5d87f2ad805516289" gracePeriod=10 Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.194395 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.319828 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-combined-ca-bundle\") pod \"56e181ed-d603-4a53-aa57-222d888ce5a2\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.320303 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-scripts\") pod \"56e181ed-d603-4a53-aa57-222d888ce5a2\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.320490 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wn5tn\" (UniqueName: \"kubernetes.io/projected/56e181ed-d603-4a53-aa57-222d888ce5a2-kube-api-access-wn5tn\") pod \"56e181ed-d603-4a53-aa57-222d888ce5a2\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.321016 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-credential-keys\") pod \"56e181ed-d603-4a53-aa57-222d888ce5a2\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.321386 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-config-data\") pod \"56e181ed-d603-4a53-aa57-222d888ce5a2\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.321574 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-fernet-keys\") pod \"56e181ed-d603-4a53-aa57-222d888ce5a2\" (UID: \"56e181ed-d603-4a53-aa57-222d888ce5a2\") " Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.327768 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-scripts" (OuterVolumeSpecName: "scripts") pod "56e181ed-d603-4a53-aa57-222d888ce5a2" (UID: "56e181ed-d603-4a53-aa57-222d888ce5a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.328268 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "56e181ed-d603-4a53-aa57-222d888ce5a2" (UID: "56e181ed-d603-4a53-aa57-222d888ce5a2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.330297 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e181ed-d603-4a53-aa57-222d888ce5a2-kube-api-access-wn5tn" (OuterVolumeSpecName: "kube-api-access-wn5tn") pod "56e181ed-d603-4a53-aa57-222d888ce5a2" (UID: "56e181ed-d603-4a53-aa57-222d888ce5a2"). InnerVolumeSpecName "kube-api-access-wn5tn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.334038 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "56e181ed-d603-4a53-aa57-222d888ce5a2" (UID: "56e181ed-d603-4a53-aa57-222d888ce5a2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.362878 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-config-data" (OuterVolumeSpecName: "config-data") pod "56e181ed-d603-4a53-aa57-222d888ce5a2" (UID: "56e181ed-d603-4a53-aa57-222d888ce5a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.392348 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56e181ed-d603-4a53-aa57-222d888ce5a2" (UID: "56e181ed-d603-4a53-aa57-222d888ce5a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.425726 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.425787 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.425801 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wn5tn\" (UniqueName: \"kubernetes.io/projected/56e181ed-d603-4a53-aa57-222d888ce5a2-kube-api-access-wn5tn\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.425814 4935 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.425826 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.425838 4935 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56e181ed-d603-4a53-aa57-222d888ce5a2-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.824524 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d586b231-e06e-4111-8f29-0b8d7c12eccc","Type":"ContainerStarted","Data":"bf23f543147880e332d6f460edcee9195c5886c4de4b7b1053a708774872c4e5"} Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.826801 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bb44q" 
event={"ID":"56e181ed-d603-4a53-aa57-222d888ce5a2","Type":"ContainerDied","Data":"69e637c148afc0c09a771ff7bd928c42f0c80724f44deb2f82f94844c13abfed"} Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.826828 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69e637c148afc0c09a771ff7bd928c42f0c80724f44deb2f82f94844c13abfed" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.826876 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bb44q" Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.830321 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5d64f494d8-2clmq" event={"ID":"954e6aa6-2067-4489-83e8-390033553c3e","Type":"ContainerStarted","Data":"05b25d1a45bf1818a6702c5fefbf1fab2899228d8ccb62647036186687cb714e"} Dec 01 18:54:37 crc kubenswrapper[4935]: I1201 18:54:37.839087 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9727d7a-7291-4a7a-9398-dddf85dd8d38","Type":"ContainerStarted","Data":"32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e"} Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.026852 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6fb8649598-lsccp"] Dec 01 18:54:38 crc kubenswrapper[4935]: E1201 18:54:38.027537 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e181ed-d603-4a53-aa57-222d888ce5a2" containerName="keystone-bootstrap" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.027557 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e181ed-d603-4a53-aa57-222d888ce5a2" containerName="keystone-bootstrap" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.027826 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e181ed-d603-4a53-aa57-222d888ce5a2" containerName="keystone-bootstrap" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.028831 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.036845 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.037054 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.037270 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.037373 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9jnc7" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.037477 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.037577 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.038323 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6fb8649598-lsccp"] Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.152588 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-scripts\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.152899 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-credential-keys\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.152920 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-fernet-keys\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.152956 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-internal-tls-certs\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.153044 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9brz8\" (UniqueName: \"kubernetes.io/projected/3de16100-5a0b-457e-b23a-6efec4cca38e-kube-api-access-9brz8\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.153093 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-public-tls-certs\") pod \"keystone-6fb8649598-lsccp\" (UID: 
\"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.153110 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-config-data\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.153357 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-combined-ca-bundle\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.255597 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-scripts\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.255676 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-credential-keys\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.255696 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-fernet-keys\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.255731 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-internal-tls-certs\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.255784 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9brz8\" (UniqueName: \"kubernetes.io/projected/3de16100-5a0b-457e-b23a-6efec4cca38e-kube-api-access-9brz8\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.255842 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-public-tls-certs\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.255867 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-config-data\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 
01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.255936 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-combined-ca-bundle\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.282285 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-public-tls-certs\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.285089 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-config-data\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.285667 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-internal-tls-certs\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.286251 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9brz8\" (UniqueName: \"kubernetes.io/projected/3de16100-5a0b-457e-b23a-6efec4cca38e-kube-api-access-9brz8\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.288942 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-scripts\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.292007 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-fernet-keys\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.292541 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-credential-keys\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.319771 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3de16100-5a0b-457e-b23a-6efec4cca38e-combined-ca-bundle\") pod \"keystone-6fb8649598-lsccp\" (UID: \"3de16100-5a0b-457e-b23a-6efec4cca38e\") " pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.397923 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.852774 4935 generic.go:334] "Generic (PLEG): container finished" podID="939e12e9-7833-49fe-93f7-8ea93afac15f" containerID="232e05af677a9738c1d83fb6168636f74bf6c21877e70bb5d87f2ad805516289" exitCode=0 Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.853184 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" event={"ID":"939e12e9-7833-49fe-93f7-8ea93afac15f","Type":"ContainerDied","Data":"232e05af677a9738c1d83fb6168636f74bf6c21877e70bb5d87f2ad805516289"} Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.854999 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9727d7a-7291-4a7a-9398-dddf85dd8d38","Type":"ContainerStarted","Data":"bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462"} Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.855192 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.856013 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.892525 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.892505939 podStartE2EDuration="7.892505939s" podCreationTimestamp="2025-12-01 18:54:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:38.889117004 +0000 UTC m=+1492.910746263" watchObservedRunningTime="2025-12-01 18:54:38.892505939 +0000 UTC m=+1492.914135198" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.926057 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.926039039 podStartE2EDuration="6.926039039s" podCreationTimestamp="2025-12-01 18:54:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:38.914067817 +0000 UTC m=+1492.935697076" watchObservedRunningTime="2025-12-01 18:54:38.926039039 +0000 UTC m=+1492.947668298" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.946962 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5d64f494d8-2clmq" podStartSLOduration=6.946938966 podStartE2EDuration="6.946938966s" podCreationTimestamp="2025-12-01 18:54:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:38.931466147 +0000 UTC m=+1492.953095406" watchObservedRunningTime="2025-12-01 18:54:38.946938966 +0000 UTC m=+1492.968568225" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.952128 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:38 crc kubenswrapper[4935]: I1201 18:54:38.952199 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:39 crc kubenswrapper[4935]: I1201 18:54:39.995857 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nbbbd" 
podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="registry-server" probeResult="failure" output=< Dec 01 18:54:39 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 18:54:39 crc kubenswrapper[4935]: > Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.416547 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.518092 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-nb\") pod \"939e12e9-7833-49fe-93f7-8ea93afac15f\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.519357 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-svc\") pod \"939e12e9-7833-49fe-93f7-8ea93afac15f\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.519474 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-swift-storage-0\") pod \"939e12e9-7833-49fe-93f7-8ea93afac15f\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.520845 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-config\") pod \"939e12e9-7833-49fe-93f7-8ea93afac15f\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.520906 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-628ws\" (UniqueName: \"kubernetes.io/projected/939e12e9-7833-49fe-93f7-8ea93afac15f-kube-api-access-628ws\") pod \"939e12e9-7833-49fe-93f7-8ea93afac15f\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.520989 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-sb\") pod \"939e12e9-7833-49fe-93f7-8ea93afac15f\" (UID: \"939e12e9-7833-49fe-93f7-8ea93afac15f\") " Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.532154 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/939e12e9-7833-49fe-93f7-8ea93afac15f-kube-api-access-628ws" (OuterVolumeSpecName: "kube-api-access-628ws") pod "939e12e9-7833-49fe-93f7-8ea93afac15f" (UID: "939e12e9-7833-49fe-93f7-8ea93afac15f"). InnerVolumeSpecName "kube-api-access-628ws". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.603262 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-config" (OuterVolumeSpecName: "config") pod "939e12e9-7833-49fe-93f7-8ea93afac15f" (UID: "939e12e9-7833-49fe-93f7-8ea93afac15f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.611838 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "939e12e9-7833-49fe-93f7-8ea93afac15f" (UID: "939e12e9-7833-49fe-93f7-8ea93afac15f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.624918 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "939e12e9-7833-49fe-93f7-8ea93afac15f" (UID: "939e12e9-7833-49fe-93f7-8ea93afac15f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.625261 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.625284 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-628ws\" (UniqueName: \"kubernetes.io/projected/939e12e9-7833-49fe-93f7-8ea93afac15f-kube-api-access-628ws\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.625296 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.625305 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.639203 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "939e12e9-7833-49fe-93f7-8ea93afac15f" (UID: "939e12e9-7833-49fe-93f7-8ea93afac15f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.650753 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "939e12e9-7833-49fe-93f7-8ea93afac15f" (UID: "939e12e9-7833-49fe-93f7-8ea93afac15f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.728206 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.728564 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/939e12e9-7833-49fe-93f7-8ea93afac15f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.882847 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" event={"ID":"939e12e9-7833-49fe-93f7-8ea93afac15f","Type":"ContainerDied","Data":"394bada363e378b00bba7c3679cc4c114c187f9b4d0fce535544e9df03d54686"} Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.882904 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-xdvvz" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.882913 4935 scope.go:117] "RemoveContainer" containerID="232e05af677a9738c1d83fb6168636f74bf6c21877e70bb5d87f2ad805516289" Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.935208 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-xdvvz"] Dec 01 18:54:40 crc kubenswrapper[4935]: I1201 18:54:40.959008 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-xdvvz"] Dec 01 18:54:41 crc kubenswrapper[4935]: I1201 18:54:41.721773 4935 scope.go:117] "RemoveContainer" containerID="419f225fdf81cb309cd729c2228b0cd4678af7f74d70ee306231fa140be115a4" Dec 01 18:54:41 crc kubenswrapper[4935]: I1201 18:54:41.768130 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:54:42 crc kubenswrapper[4935]: W1201 18:54:42.252699 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3de16100_5a0b_457e_b23a_6efec4cca38e.slice/crio-020656b3e8804904cd0995e6c3e38e9eb321715ed84a0684e1fa0b8115998ce7 WatchSource:0}: Error finding container 020656b3e8804904cd0995e6c3e38e9eb321715ed84a0684e1fa0b8115998ce7: Status 404 returned error can't find the container with id 020656b3e8804904cd0995e6c3e38e9eb321715ed84a0684e1fa0b8115998ce7 Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.261405 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6fb8649598-lsccp"] Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.279845 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.281869 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.330905 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.339120 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.529636 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="939e12e9-7833-49fe-93f7-8ea93afac15f" path="/var/lib/kubelet/pods/939e12e9-7833-49fe-93f7-8ea93afac15f/volumes" Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.933567 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-747dz" event={"ID":"7935698e-d40a-4c10-bf91-0a5d8855a09e","Type":"ContainerStarted","Data":"9bd655859982a469f3fa7f88256dd912f7215da1223cbaa4a0d90e6354469a70"} Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.952115 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerStarted","Data":"02c8167fd5312deb5426c6ac35e5b0aec6a2092c004d81bd70495fc30609f141"} Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.958677 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-747dz" podStartSLOduration=3.046892532 podStartE2EDuration="52.958658502s" podCreationTimestamp="2025-12-01 18:53:50 +0000 UTC" firstStartedPulling="2025-12-01 18:53:52.017430601 +0000 UTC m=+1446.039059860" lastFinishedPulling="2025-12-01 18:54:41.929196561 +0000 UTC m=+1495.950825830" observedRunningTime="2025-12-01 18:54:42.954902306 +0000 UTC m=+1496.976531575" watchObservedRunningTime="2025-12-01 18:54:42.958658502 +0000 UTC m=+1496.980287751" Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.962731 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-gk8gw" event={"ID":"1e60c370-8ffd-4b97-a829-176da28bf116","Type":"ContainerStarted","Data":"ca33ca7b137a666f9040e8783881de9988ea8fe540509c7029539cdc971f5f57"} Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.969186 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6fb8649598-lsccp" event={"ID":"3de16100-5a0b-457e-b23a-6efec4cca38e","Type":"ContainerStarted","Data":"020656b3e8804904cd0995e6c3e38e9eb321715ed84a0684e1fa0b8115998ce7"} Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.970057 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.970128 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:42 crc kubenswrapper[4935]: I1201 18:54:42.992599 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-gk8gw" podStartSLOduration=3.673202667 podStartE2EDuration="52.992580373s" podCreationTimestamp="2025-12-01 18:53:50 +0000 UTC" firstStartedPulling="2025-12-01 18:53:52.609436833 +0000 UTC m=+1446.631066092" lastFinishedPulling="2025-12-01 18:54:41.928814549 +0000 UTC m=+1495.950443798" observedRunningTime="2025-12-01 18:54:42.982745589 +0000 UTC m=+1497.004374848" watchObservedRunningTime="2025-12-01 18:54:42.992580373 +0000 UTC m=+1497.014209632" Dec 01 18:54:43 crc kubenswrapper[4935]: I1201 18:54:43.257851 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 18:54:43 crc kubenswrapper[4935]: I1201 18:54:43.258306 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 18:54:43 crc kubenswrapper[4935]: I1201 18:54:43.305584 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 18:54:43 crc kubenswrapper[4935]: I1201 18:54:43.327309 
4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 18:54:43 crc kubenswrapper[4935]: I1201 18:54:43.983903 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6fb8649598-lsccp" event={"ID":"3de16100-5a0b-457e-b23a-6efec4cca38e","Type":"ContainerStarted","Data":"e91210bf3a78507afa743fde66ac458443aac4f758682441aedb3071daf7869f"} Dec 01 18:54:43 crc kubenswrapper[4935]: I1201 18:54:43.988335 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:54:43 crc kubenswrapper[4935]: I1201 18:54:43.994869 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-c8448" event={"ID":"b8f827a2-a529-4371-8c82-c06377b2c9f2","Type":"ContainerStarted","Data":"3e25ffdbe38bd4a9b28f3317153dffd84e54ae26581824400f54ff9f78ac7e4d"} Dec 01 18:54:43 crc kubenswrapper[4935]: I1201 18:54:43.994924 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 18:54:43 crc kubenswrapper[4935]: I1201 18:54:43.994938 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 18:54:44 crc kubenswrapper[4935]: I1201 18:54:44.015591 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6fb8649598-lsccp" podStartSLOduration=7.015570554 podStartE2EDuration="7.015570554s" podCreationTimestamp="2025-12-01 18:54:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:44.003922943 +0000 UTC m=+1498.025552212" watchObservedRunningTime="2025-12-01 18:54:44.015570554 +0000 UTC m=+1498.037199813" Dec 01 18:54:44 crc kubenswrapper[4935]: I1201 18:54:44.031935 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-c8448" podStartSLOduration=4.667593621 podStartE2EDuration="54.031912711s" podCreationTimestamp="2025-12-01 18:53:50 +0000 UTC" firstStartedPulling="2025-12-01 18:53:52.566160071 +0000 UTC m=+1446.587789330" lastFinishedPulling="2025-12-01 18:54:41.930479161 +0000 UTC m=+1495.952108420" observedRunningTime="2025-12-01 18:54:44.028876857 +0000 UTC m=+1498.050506116" watchObservedRunningTime="2025-12-01 18:54:44.031912711 +0000 UTC m=+1498.053541970" Dec 01 18:54:45 crc kubenswrapper[4935]: I1201 18:54:45.004279 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:54:45 crc kubenswrapper[4935]: I1201 18:54:45.004308 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:54:46 crc kubenswrapper[4935]: I1201 18:54:46.019409 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:54:46 crc kubenswrapper[4935]: I1201 18:54:46.020112 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:54:46 crc kubenswrapper[4935]: I1201 18:54:46.628456 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 18:54:46 crc kubenswrapper[4935]: I1201 18:54:46.628544 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:46 crc kubenswrapper[4935]: I1201 18:54:46.628677 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:54:46 crc 
kubenswrapper[4935]: I1201 18:54:46.689567 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 18:54:46 crc kubenswrapper[4935]: I1201 18:54:46.775069 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 18:54:49 crc kubenswrapper[4935]: I1201 18:54:49.982326 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nbbbd" podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="registry-server" probeResult="failure" output=< Dec 01 18:54:49 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 18:54:49 crc kubenswrapper[4935]: > Dec 01 18:54:51 crc kubenswrapper[4935]: I1201 18:54:51.087382 4935 generic.go:334] "Generic (PLEG): container finished" podID="1e60c370-8ffd-4b97-a829-176da28bf116" containerID="ca33ca7b137a666f9040e8783881de9988ea8fe540509c7029539cdc971f5f57" exitCode=0 Dec 01 18:54:51 crc kubenswrapper[4935]: I1201 18:54:51.087444 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-gk8gw" event={"ID":"1e60c370-8ffd-4b97-a829-176da28bf116","Type":"ContainerDied","Data":"ca33ca7b137a666f9040e8783881de9988ea8fe540509c7029539cdc971f5f57"} Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.100284 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerStarted","Data":"a93de29124ae8375cb246f75a03012a007af59031b6b08bbae8af934f4208e85"} Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.100736 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="sg-core" containerID="cri-o://02c8167fd5312deb5426c6ac35e5b0aec6a2092c004d81bd70495fc30609f141" gracePeriod=30 Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.100685 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="proxy-httpd" containerID="cri-o://a93de29124ae8375cb246f75a03012a007af59031b6b08bbae8af934f4208e85" gracePeriod=30 Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.100725 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="ceilometer-notification-agent" containerID="cri-o://b14c34b1b0dda050ae97b1323f3ad1f76fce0fc3439cef9b136e325e3819408b" gracePeriod=30 Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.100458 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="ceilometer-central-agent" containerID="cri-o://4629a371e99213e2533baf55c30b880375731480aa38b623530d9a06141e7080" gracePeriod=30 Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.100956 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.105986 4935 generic.go:334] "Generic (PLEG): container finished" podID="7935698e-d40a-4c10-bf91-0a5d8855a09e" containerID="9bd655859982a469f3fa7f88256dd912f7215da1223cbaa4a0d90e6354469a70" exitCode=0 Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.106068 4935 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/heat-db-sync-747dz" event={"ID":"7935698e-d40a-4c10-bf91-0a5d8855a09e","Type":"ContainerDied","Data":"9bd655859982a469f3fa7f88256dd912f7215da1223cbaa4a0d90e6354469a70"} Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.170186 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.2316224 podStartE2EDuration="1m1.170166372s" podCreationTimestamp="2025-12-01 18:53:51 +0000 UTC" firstStartedPulling="2025-12-01 18:53:52.595862982 +0000 UTC m=+1446.617492231" lastFinishedPulling="2025-12-01 18:54:51.534406884 +0000 UTC m=+1505.556036203" observedRunningTime="2025-12-01 18:54:52.149759519 +0000 UTC m=+1506.171388788" watchObservedRunningTime="2025-12-01 18:54:52.170166372 +0000 UTC m=+1506.191795631" Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.472228 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.640168 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-db-sync-config-data\") pod \"1e60c370-8ffd-4b97-a829-176da28bf116\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.640273 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnpps\" (UniqueName: \"kubernetes.io/projected/1e60c370-8ffd-4b97-a829-176da28bf116-kube-api-access-pnpps\") pod \"1e60c370-8ffd-4b97-a829-176da28bf116\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.640340 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-combined-ca-bundle\") pod \"1e60c370-8ffd-4b97-a829-176da28bf116\" (UID: \"1e60c370-8ffd-4b97-a829-176da28bf116\") " Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.646717 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1e60c370-8ffd-4b97-a829-176da28bf116" (UID: "1e60c370-8ffd-4b97-a829-176da28bf116"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.646927 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e60c370-8ffd-4b97-a829-176da28bf116-kube-api-access-pnpps" (OuterVolumeSpecName: "kube-api-access-pnpps") pod "1e60c370-8ffd-4b97-a829-176da28bf116" (UID: "1e60c370-8ffd-4b97-a829-176da28bf116"). InnerVolumeSpecName "kube-api-access-pnpps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.674749 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1e60c370-8ffd-4b97-a829-176da28bf116" (UID: "1e60c370-8ffd-4b97-a829-176da28bf116"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.743050 4935 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.743453 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnpps\" (UniqueName: \"kubernetes.io/projected/1e60c370-8ffd-4b97-a829-176da28bf116-kube-api-access-pnpps\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:52 crc kubenswrapper[4935]: I1201 18:54:52.743466 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e60c370-8ffd-4b97-a829-176da28bf116-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.125711 4935 generic.go:334] "Generic (PLEG): container finished" podID="b8f827a2-a529-4371-8c82-c06377b2c9f2" containerID="3e25ffdbe38bd4a9b28f3317153dffd84e54ae26581824400f54ff9f78ac7e4d" exitCode=0 Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.125820 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-c8448" event={"ID":"b8f827a2-a529-4371-8c82-c06377b2c9f2","Type":"ContainerDied","Data":"3e25ffdbe38bd4a9b28f3317153dffd84e54ae26581824400f54ff9f78ac7e4d"} Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.128463 4935 generic.go:334] "Generic (PLEG): container finished" podID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerID="a93de29124ae8375cb246f75a03012a007af59031b6b08bbae8af934f4208e85" exitCode=0 Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.128494 4935 generic.go:334] "Generic (PLEG): container finished" podID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerID="02c8167fd5312deb5426c6ac35e5b0aec6a2092c004d81bd70495fc30609f141" exitCode=2 Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.128503 4935 generic.go:334] "Generic (PLEG): container finished" podID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerID="4629a371e99213e2533baf55c30b880375731480aa38b623530d9a06141e7080" exitCode=0 Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.128542 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerDied","Data":"a93de29124ae8375cb246f75a03012a007af59031b6b08bbae8af934f4208e85"} Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.128569 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerDied","Data":"02c8167fd5312deb5426c6ac35e5b0aec6a2092c004d81bd70495fc30609f141"} Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.128579 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerDied","Data":"4629a371e99213e2533baf55c30b880375731480aa38b623530d9a06141e7080"} Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.132037 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-gk8gw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.134233 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-gk8gw" event={"ID":"1e60c370-8ffd-4b97-a829-176da28bf116","Type":"ContainerDied","Data":"b250bb0eb77d66ac3f75efd9998c353f6cb24dd3d97d9dc02a8bdda79bfec4cf"} Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.134274 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b250bb0eb77d66ac3f75efd9998c353f6cb24dd3d97d9dc02a8bdda79bfec4cf" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.310472 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6b8c989c57-rgsh8"] Dec 01 18:54:53 crc kubenswrapper[4935]: E1201 18:54:53.311123 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e60c370-8ffd-4b97-a829-176da28bf116" containerName="barbican-db-sync" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.311164 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e60c370-8ffd-4b97-a829-176da28bf116" containerName="barbican-db-sync" Dec 01 18:54:53 crc kubenswrapper[4935]: E1201 18:54:53.311253 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="939e12e9-7833-49fe-93f7-8ea93afac15f" containerName="init" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.311264 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="939e12e9-7833-49fe-93f7-8ea93afac15f" containerName="init" Dec 01 18:54:53 crc kubenswrapper[4935]: E1201 18:54:53.311275 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="939e12e9-7833-49fe-93f7-8ea93afac15f" containerName="dnsmasq-dns" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.311486 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="939e12e9-7833-49fe-93f7-8ea93afac15f" containerName="dnsmasq-dns" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.311751 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e60c370-8ffd-4b97-a829-176da28bf116" containerName="barbican-db-sync" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.311787 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="939e12e9-7833-49fe-93f7-8ea93afac15f" containerName="dnsmasq-dns" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.313345 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.318901 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.319370 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ffllp" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.319440 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.323609 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-684877c94b-g4nmw"] Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.325486 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.337280 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.344430 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b8c989c57-rgsh8"] Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.365297 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-684877c94b-g4nmw"] Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.376904 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jtlbc"] Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.382473 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.431789 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jtlbc"] Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469337 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rs89\" (UniqueName: \"kubernetes.io/projected/05c203ba-25f0-4331-bf4f-19593176e6a1-kube-api-access-8rs89\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469425 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05c203ba-25f0-4331-bf4f-19593176e6a1-logs\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469466 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d980d57-2d10-4065-990f-e381180f2175-combined-ca-bundle\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469487 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c203ba-25f0-4331-bf4f-19593176e6a1-combined-ca-bundle\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469506 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c203ba-25f0-4331-bf4f-19593176e6a1-config-data\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469560 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d980d57-2d10-4065-990f-e381180f2175-config-data\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: 
\"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469586 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txk8b\" (UniqueName: \"kubernetes.io/projected/4d980d57-2d10-4065-990f-e381180f2175-kube-api-access-txk8b\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469605 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d980d57-2d10-4065-990f-e381180f2175-logs\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469689 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d980d57-2d10-4065-990f-e381180f2175-config-data-custom\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.469709 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/05c203ba-25f0-4331-bf4f-19593176e6a1-config-data-custom\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.497402 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-577b88f67d-xws59"] Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.500207 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.503835 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.527208 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-577b88f67d-xws59"] Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.571324 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-config\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.571831 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.571917 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d980d57-2d10-4065-990f-e381180f2175-config-data-custom\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.571997 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/05c203ba-25f0-4331-bf4f-19593176e6a1-config-data-custom\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572070 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572179 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rs89\" (UniqueName: \"kubernetes.io/projected/05c203ba-25f0-4331-bf4f-19593176e6a1-kube-api-access-8rs89\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572301 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05c203ba-25f0-4331-bf4f-19593176e6a1-logs\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572396 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d980d57-2d10-4065-990f-e381180f2175-combined-ca-bundle\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" 
(UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572466 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c203ba-25f0-4331-bf4f-19593176e6a1-combined-ca-bundle\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572538 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c203ba-25f0-4331-bf4f-19593176e6a1-config-data\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572684 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572756 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pbg8\" (UniqueName: \"kubernetes.io/projected/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-kube-api-access-7pbg8\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572827 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d980d57-2d10-4065-990f-e381180f2175-config-data\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572902 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txk8b\" (UniqueName: \"kubernetes.io/projected/4d980d57-2d10-4065-990f-e381180f2175-kube-api-access-txk8b\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.572969 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d980d57-2d10-4065-990f-e381180f2175-logs\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.573050 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.573588 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/05c203ba-25f0-4331-bf4f-19593176e6a1-logs\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.576931 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d980d57-2d10-4065-990f-e381180f2175-logs\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.579190 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c203ba-25f0-4331-bf4f-19593176e6a1-config-data\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.580877 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c203ba-25f0-4331-bf4f-19593176e6a1-combined-ca-bundle\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.582020 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d980d57-2d10-4065-990f-e381180f2175-config-data-custom\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.582161 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d980d57-2d10-4065-990f-e381180f2175-config-data\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.582802 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/05c203ba-25f0-4331-bf4f-19593176e6a1-config-data-custom\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.587548 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d980d57-2d10-4065-990f-e381180f2175-combined-ca-bundle\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.591410 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txk8b\" (UniqueName: \"kubernetes.io/projected/4d980d57-2d10-4065-990f-e381180f2175-kube-api-access-txk8b\") pod \"barbican-keystone-listener-684877c94b-g4nmw\" (UID: \"4d980d57-2d10-4065-990f-e381180f2175\") " pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.591947 4935 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-8rs89\" (UniqueName: \"kubernetes.io/projected/05c203ba-25f0-4331-bf4f-19593176e6a1-kube-api-access-8rs89\") pod \"barbican-worker-6b8c989c57-rgsh8\" (UID: \"05c203ba-25f0-4331-bf4f-19593176e6a1\") " pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.653096 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6b8c989c57-rgsh8" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.656356 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-747dz" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.656937 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.677647 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-config\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.677721 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.677759 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.677780 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data-custom\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.677818 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.677843 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a9b394-1d22-44af-86f2-0c98ac7b43e4-logs\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.677908 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-combined-ca-bundle\") pod \"barbican-api-577b88f67d-xws59\" (UID: 
\"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.677971 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzrz9\" (UniqueName: \"kubernetes.io/projected/17a9b394-1d22-44af-86f2-0c98ac7b43e4-kube-api-access-qzrz9\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.677996 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.678018 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pbg8\" (UniqueName: \"kubernetes.io/projected/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-kube-api-access-7pbg8\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.678055 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.680130 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.680285 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-config\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.681132 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.681186 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.681291 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " 
pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.704571 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pbg8\" (UniqueName: \"kubernetes.io/projected/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-kube-api-access-7pbg8\") pod \"dnsmasq-dns-848cf88cfc-jtlbc\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.711686 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.779136 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-config-data\") pod \"7935698e-d40a-4c10-bf91-0a5d8855a09e\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.779254 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vj9nw\" (UniqueName: \"kubernetes.io/projected/7935698e-d40a-4c10-bf91-0a5d8855a09e-kube-api-access-vj9nw\") pod \"7935698e-d40a-4c10-bf91-0a5d8855a09e\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.779534 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-combined-ca-bundle\") pod \"7935698e-d40a-4c10-bf91-0a5d8855a09e\" (UID: \"7935698e-d40a-4c10-bf91-0a5d8855a09e\") " Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.779848 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data-custom\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.779883 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.779910 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a9b394-1d22-44af-86f2-0c98ac7b43e4-logs\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.780286 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-combined-ca-bundle\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.780541 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzrz9\" (UniqueName: \"kubernetes.io/projected/17a9b394-1d22-44af-86f2-0c98ac7b43e4-kube-api-access-qzrz9\") pod \"barbican-api-577b88f67d-xws59\" 
(UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.781847 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a9b394-1d22-44af-86f2-0c98ac7b43e4-logs\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.786312 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.798200 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7935698e-d40a-4c10-bf91-0a5d8855a09e-kube-api-access-vj9nw" (OuterVolumeSpecName: "kube-api-access-vj9nw") pod "7935698e-d40a-4c10-bf91-0a5d8855a09e" (UID: "7935698e-d40a-4c10-bf91-0a5d8855a09e"). InnerVolumeSpecName "kube-api-access-vj9nw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.803784 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data-custom\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.804323 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-combined-ca-bundle\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.804922 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzrz9\" (UniqueName: \"kubernetes.io/projected/17a9b394-1d22-44af-86f2-0c98ac7b43e4-kube-api-access-qzrz9\") pod \"barbican-api-577b88f67d-xws59\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.833292 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.849994 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7935698e-d40a-4c10-bf91-0a5d8855a09e" (UID: "7935698e-d40a-4c10-bf91-0a5d8855a09e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.884342 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.884377 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vj9nw\" (UniqueName: \"kubernetes.io/projected/7935698e-d40a-4c10-bf91-0a5d8855a09e-kube-api-access-vj9nw\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.940779 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-config-data" (OuterVolumeSpecName: "config-data") pod "7935698e-d40a-4c10-bf91-0a5d8855a09e" (UID: "7935698e-d40a-4c10-bf91-0a5d8855a09e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:53 crc kubenswrapper[4935]: I1201 18:54:53.987490 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935698e-d40a-4c10-bf91-0a5d8855a09e-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.147908 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-747dz" event={"ID":"7935698e-d40a-4c10-bf91-0a5d8855a09e","Type":"ContainerDied","Data":"05e45234abe61a069c8acb28a90ee080b797a0f06000eb1a2ef8974c4ee7ec7a"} Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.148259 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05e45234abe61a069c8acb28a90ee080b797a0f06000eb1a2ef8974c4ee7ec7a" Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.148320 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-747dz" Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.154695 4935 generic.go:334] "Generic (PLEG): container finished" podID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerID="b14c34b1b0dda050ae97b1323f3ad1f76fce0fc3439cef9b136e325e3819408b" exitCode=0 Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.154891 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerDied","Data":"b14c34b1b0dda050ae97b1323f3ad1f76fce0fc3439cef9b136e325e3819408b"} Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.346671 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.346741 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.561678 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b8c989c57-rgsh8"] Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.571577 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jtlbc"] Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.783361 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-577b88f67d-xws59"] Dec 01 18:54:54 crc kubenswrapper[4935]: W1201 18:54:54.793762 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17a9b394_1d22_44af_86f2_0c98ac7b43e4.slice/crio-7238540402b3d75c3b73c9f2dcfe12553ce9204c00ab52b1434787e55b411f6b WatchSource:0}: Error finding container 7238540402b3d75c3b73c9f2dcfe12553ce9204c00ab52b1434787e55b411f6b: Status 404 returned error can't find the container with id 7238540402b3d75c3b73c9f2dcfe12553ce9204c00ab52b1434787e55b411f6b Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.843034 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-684877c94b-g4nmw"] Dec 01 18:54:54 crc kubenswrapper[4935]: W1201 18:54:54.860399 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d980d57_2d10_4065_990f_e381180f2175.slice/crio-06ee222af32a26c8505f85c3a4a19fe2c658bd298107a1e6de4a339451a1ef35 WatchSource:0}: Error finding container 06ee222af32a26c8505f85c3a4a19fe2c658bd298107a1e6de4a339451a1ef35: Status 404 returned error can't find the container with id 06ee222af32a26c8505f85c3a4a19fe2c658bd298107a1e6de4a339451a1ef35 Dec 01 18:54:54 crc kubenswrapper[4935]: I1201 18:54:54.964826 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-c8448" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.043981 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.115008 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-combined-ca-bundle\") pod \"b8f827a2-a529-4371-8c82-c06377b2c9f2\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.115065 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-config-data\") pod \"b8f827a2-a529-4371-8c82-c06377b2c9f2\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.115086 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8f827a2-a529-4371-8c82-c06377b2c9f2-etc-machine-id\") pod \"b8f827a2-a529-4371-8c82-c06377b2c9f2\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.115203 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b8f827a2-a529-4371-8c82-c06377b2c9f2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b8f827a2-a529-4371-8c82-c06377b2c9f2" (UID: "b8f827a2-a529-4371-8c82-c06377b2c9f2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.115240 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-db-sync-config-data\") pod \"b8f827a2-a529-4371-8c82-c06377b2c9f2\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.115309 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpkrf\" (UniqueName: \"kubernetes.io/projected/b8f827a2-a529-4371-8c82-c06377b2c9f2-kube-api-access-lpkrf\") pod \"b8f827a2-a529-4371-8c82-c06377b2c9f2\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.115406 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-scripts\") pod \"b8f827a2-a529-4371-8c82-c06377b2c9f2\" (UID: \"b8f827a2-a529-4371-8c82-c06377b2c9f2\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.116158 4935 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8f827a2-a529-4371-8c82-c06377b2c9f2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.119542 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b8f827a2-a529-4371-8c82-c06377b2c9f2" (UID: "b8f827a2-a529-4371-8c82-c06377b2c9f2"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.119577 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-scripts" (OuterVolumeSpecName: "scripts") pod "b8f827a2-a529-4371-8c82-c06377b2c9f2" (UID: "b8f827a2-a529-4371-8c82-c06377b2c9f2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.122163 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8f827a2-a529-4371-8c82-c06377b2c9f2-kube-api-access-lpkrf" (OuterVolumeSpecName: "kube-api-access-lpkrf") pod "b8f827a2-a529-4371-8c82-c06377b2c9f2" (UID: "b8f827a2-a529-4371-8c82-c06377b2c9f2"). InnerVolumeSpecName "kube-api-access-lpkrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.160685 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8f827a2-a529-4371-8c82-c06377b2c9f2" (UID: "b8f827a2-a529-4371-8c82-c06377b2c9f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.180239 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7392ddad-969f-4a42-86a1-460f3ca2d500","Type":"ContainerDied","Data":"9e6b292b838cdc8e7350b1a12336bb416154917bc4b6477380d2a22dc1b1859b"} Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.180287 4935 scope.go:117] "RemoveContainer" containerID="a93de29124ae8375cb246f75a03012a007af59031b6b08bbae8af934f4208e85" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.180437 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.183655 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b8c989c57-rgsh8" event={"ID":"05c203ba-25f0-4331-bf4f-19593176e6a1","Type":"ContainerStarted","Data":"5ceea4052f88d5f73a3b14e5414432f49aeea5846bd6db5479f80f58787e9186"} Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.186259 4935 generic.go:334] "Generic (PLEG): container finished" podID="9f63b40a-2c2a-4bd3-9d3f-c74d70286082" containerID="1f7789a0143a24acdaeef7679e202c29ad04517bbff9053eea70cf4605eb84ad" exitCode=0 Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.186306 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" event={"ID":"9f63b40a-2c2a-4bd3-9d3f-c74d70286082","Type":"ContainerDied","Data":"1f7789a0143a24acdaeef7679e202c29ad04517bbff9053eea70cf4605eb84ad"} Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.186549 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" event={"ID":"9f63b40a-2c2a-4bd3-9d3f-c74d70286082","Type":"ContainerStarted","Data":"d3ea61122137bb7db6ef08660054c9c1b0041f6adc69a428ac6e72b13518f942"} Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.187826 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-577b88f67d-xws59" event={"ID":"17a9b394-1d22-44af-86f2-0c98ac7b43e4","Type":"ContainerStarted","Data":"ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a"} Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.187848 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-577b88f67d-xws59" event={"ID":"17a9b394-1d22-44af-86f2-0c98ac7b43e4","Type":"ContainerStarted","Data":"7238540402b3d75c3b73c9f2dcfe12553ce9204c00ab52b1434787e55b411f6b"} Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.191995 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-config-data" (OuterVolumeSpecName: "config-data") pod "b8f827a2-a529-4371-8c82-c06377b2c9f2" (UID: "b8f827a2-a529-4371-8c82-c06377b2c9f2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.196054 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-c8448" event={"ID":"b8f827a2-a529-4371-8c82-c06377b2c9f2","Type":"ContainerDied","Data":"621035122a958e7d1058fa378aff571c1f561a2df5397d179bf6aa9f07c69e6f"} Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.196102 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="621035122a958e7d1058fa378aff571c1f561a2df5397d179bf6aa9f07c69e6f" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.196071 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-c8448" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.198429 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" event={"ID":"4d980d57-2d10-4065-990f-e381180f2175","Type":"ContainerStarted","Data":"06ee222af32a26c8505f85c3a4a19fe2c658bd298107a1e6de4a339451a1ef35"} Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.217454 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-scripts\") pod \"7392ddad-969f-4a42-86a1-460f3ca2d500\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.217757 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-config-data\") pod \"7392ddad-969f-4a42-86a1-460f3ca2d500\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.217796 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-run-httpd\") pod \"7392ddad-969f-4a42-86a1-460f3ca2d500\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.217866 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-combined-ca-bundle\") pod \"7392ddad-969f-4a42-86a1-460f3ca2d500\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.217894 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-log-httpd\") pod \"7392ddad-969f-4a42-86a1-460f3ca2d500\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.217919 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-sg-core-conf-yaml\") pod \"7392ddad-969f-4a42-86a1-460f3ca2d500\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.217949 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sc5sd\" (UniqueName: \"kubernetes.io/projected/7392ddad-969f-4a42-86a1-460f3ca2d500-kube-api-access-sc5sd\") pod \"7392ddad-969f-4a42-86a1-460f3ca2d500\" (UID: \"7392ddad-969f-4a42-86a1-460f3ca2d500\") " Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.218465 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.218478 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.218487 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.218495 4935 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b8f827a2-a529-4371-8c82-c06377b2c9f2-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.218504 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpkrf\" (UniqueName: \"kubernetes.io/projected/b8f827a2-a529-4371-8c82-c06377b2c9f2-kube-api-access-lpkrf\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.221385 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7392ddad-969f-4a42-86a1-460f3ca2d500-kube-api-access-sc5sd" (OuterVolumeSpecName: "kube-api-access-sc5sd") pod "7392ddad-969f-4a42-86a1-460f3ca2d500" (UID: "7392ddad-969f-4a42-86a1-460f3ca2d500"). InnerVolumeSpecName "kube-api-access-sc5sd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.221597 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7392ddad-969f-4a42-86a1-460f3ca2d500" (UID: "7392ddad-969f-4a42-86a1-460f3ca2d500"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.225019 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7392ddad-969f-4a42-86a1-460f3ca2d500" (UID: "7392ddad-969f-4a42-86a1-460f3ca2d500"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.228296 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-scripts" (OuterVolumeSpecName: "scripts") pod "7392ddad-969f-4a42-86a1-460f3ca2d500" (UID: "7392ddad-969f-4a42-86a1-460f3ca2d500"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.261520 4935 scope.go:117] "RemoveContainer" containerID="02c8167fd5312deb5426c6ac35e5b0aec6a2092c004d81bd70495fc30609f141" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.288652 4935 scope.go:117] "RemoveContainer" containerID="b14c34b1b0dda050ae97b1323f3ad1f76fce0fc3439cef9b136e325e3819408b" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.288815 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7392ddad-969f-4a42-86a1-460f3ca2d500" (UID: "7392ddad-969f-4a42-86a1-460f3ca2d500"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.320950 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.322461 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.322486 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sc5sd\" (UniqueName: \"kubernetes.io/projected/7392ddad-969f-4a42-86a1-460f3ca2d500-kube-api-access-sc5sd\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.322500 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.322509 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7392ddad-969f-4a42-86a1-460f3ca2d500-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.399281 4935 scope.go:117] "RemoveContainer" containerID="4629a371e99213e2533baf55c30b880375731480aa38b623530d9a06141e7080" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.404219 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7392ddad-969f-4a42-86a1-460f3ca2d500" (UID: "7392ddad-969f-4a42-86a1-460f3ca2d500"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.418293 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 18:54:55 crc kubenswrapper[4935]: E1201 18:54:55.418795 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="sg-core" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.418811 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="sg-core" Dec 01 18:54:55 crc kubenswrapper[4935]: E1201 18:54:55.418822 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7935698e-d40a-4c10-bf91-0a5d8855a09e" containerName="heat-db-sync" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.418829 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="7935698e-d40a-4c10-bf91-0a5d8855a09e" containerName="heat-db-sync" Dec 01 18:54:55 crc kubenswrapper[4935]: E1201 18:54:55.418840 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f827a2-a529-4371-8c82-c06377b2c9f2" containerName="cinder-db-sync" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.418846 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f827a2-a529-4371-8c82-c06377b2c9f2" containerName="cinder-db-sync" Dec 01 18:54:55 crc kubenswrapper[4935]: E1201 18:54:55.418868 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="ceilometer-notification-agent" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.418874 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="ceilometer-notification-agent" Dec 01 18:54:55 crc kubenswrapper[4935]: E1201 18:54:55.418885 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="ceilometer-central-agent" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.418891 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="ceilometer-central-agent" Dec 01 18:54:55 crc kubenswrapper[4935]: E1201 18:54:55.418906 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="proxy-httpd" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.418912 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="proxy-httpd" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.419119 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="ceilometer-notification-agent" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.419133 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="sg-core" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.419154 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="ceilometer-central-agent" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.419164 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f827a2-a529-4371-8c82-c06377b2c9f2" containerName="cinder-db-sync" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.419177 4935 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7935698e-d40a-4c10-bf91-0a5d8855a09e" containerName="heat-db-sync" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.419198 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" containerName="proxy-httpd" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.420893 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.428020 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-config-data" (OuterVolumeSpecName: "config-data") pod "7392ddad-969f-4a42-86a1-460f3ca2d500" (UID: "7392ddad-969f-4a42-86a1-460f3ca2d500"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.429803 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.429819 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7392ddad-969f-4a42-86a1-460f3ca2d500-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.430923 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.431114 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-ts4v5" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.431244 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.431462 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.454887 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.500416 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jtlbc"] Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.531507 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.531554 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwlq9\" (UniqueName: \"kubernetes.io/projected/247005f0-69f8-48a1-a8f9-2f6c45aca362-kube-api-access-dwlq9\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.531590 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/247005f0-69f8-48a1-a8f9-2f6c45aca362-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " 
pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.531638 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.531673 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.531763 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-scripts\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.568495 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-d88l4"] Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.571603 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.623511 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-d88l4"] Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.633171 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/247005f0-69f8-48a1-a8f9-2f6c45aca362-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.633241 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.633282 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.633412 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-scripts\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.633481 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc 
kubenswrapper[4935]: I1201 18:54:55.633501 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwlq9\" (UniqueName: \"kubernetes.io/projected/247005f0-69f8-48a1-a8f9-2f6c45aca362-kube-api-access-dwlq9\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.633782 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/247005f0-69f8-48a1-a8f9-2f6c45aca362-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.648579 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.652483 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-scripts\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.661373 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwlq9\" (UniqueName: \"kubernetes.io/projected/247005f0-69f8-48a1-a8f9-2f6c45aca362-kube-api-access-dwlq9\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.674181 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.704901 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.736285 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.736535 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.736622 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-svc\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.736788 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.736921 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r9ck\" (UniqueName: \"kubernetes.io/projected/693ddd06-a5c8-459d-8b68-d9e7a734809a-kube-api-access-6r9ck\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.736997 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-config\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.753719 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.756831 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.764893 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.783210 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.785846 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.798839 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.799372 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.818747 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.840509 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r9ck\" (UniqueName: \"kubernetes.io/projected/693ddd06-a5c8-459d-8b68-d9e7a734809a-kube-api-access-6r9ck\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.840574 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-config\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.840681 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.840705 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.840742 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-svc\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.840827 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.841856 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.842650 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-config\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: 
\"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.843193 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.843741 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.881294 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-svc\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.885938 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r9ck\" (UniqueName: \"kubernetes.io/projected/693ddd06-a5c8-459d-8b68-d9e7a734809a-kube-api-access-6r9ck\") pod \"dnsmasq-dns-6578955fd5-d88l4\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.916609 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.941954 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-log-httpd\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.942206 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-scripts\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.942239 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xfvk\" (UniqueName: \"kubernetes.io/projected/c30a1f35-70d9-42c9-9bed-35ab762851ad-kube-api-access-9xfvk\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.942282 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-config-data\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.942307 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.942399 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.942416 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-run-httpd\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.956713 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.959847 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.963195 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 18:54:55 crc kubenswrapper[4935]: I1201 18:54:55.985679 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046602 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c383708-490b-4f40-a370-bce6a8d57c18-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046720 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-log-httpd\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046745 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-scripts\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046764 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c383708-490b-4f40-a370-bce6a8d57c18-logs\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046796 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046815 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xfvk\" (UniqueName: 
\"kubernetes.io/projected/c30a1f35-70d9-42c9-9bed-35ab762851ad-kube-api-access-9xfvk\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046852 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data-custom\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046902 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-config-data\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046935 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.046963 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvn26\" (UniqueName: \"kubernetes.io/projected/6c383708-490b-4f40-a370-bce6a8d57c18-kube-api-access-lvn26\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.047081 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.047108 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-scripts\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.047179 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.047219 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-run-httpd\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.047995 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-run-httpd\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.048797 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-log-httpd\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.058851 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.065267 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-config-data\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.067218 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-scripts\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.083073 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xfvk\" (UniqueName: \"kubernetes.io/projected/c30a1f35-70d9-42c9-9bed-35ab762851ad-kube-api-access-9xfvk\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.091062 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.100302 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.161894 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.161945 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-scripts\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.162009 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c383708-490b-4f40-a370-bce6a8d57c18-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.162059 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c383708-490b-4f40-a370-bce6a8d57c18-logs\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " 
pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.162082 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.162112 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data-custom\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.162176 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvn26\" (UniqueName: \"kubernetes.io/projected/6c383708-490b-4f40-a370-bce6a8d57c18-kube-api-access-lvn26\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.163293 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c383708-490b-4f40-a370-bce6a8d57c18-logs\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.166542 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c383708-490b-4f40-a370-bce6a8d57c18-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.169240 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.170666 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-scripts\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.170868 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data-custom\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.171232 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.182273 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvn26\" (UniqueName: \"kubernetes.io/projected/6c383708-490b-4f40-a370-bce6a8d57c18-kube-api-access-lvn26\") pod \"cinder-api-0\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " pod="openstack/cinder-api-0" Dec 01 18:54:56 crc 
kubenswrapper[4935]: I1201 18:54:56.253237 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.284797 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.296313 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-577b88f67d-xws59" event={"ID":"17a9b394-1d22-44af-86f2-0c98ac7b43e4","Type":"ContainerStarted","Data":"3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f"} Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.298119 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.298341 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.315318 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" event={"ID":"9f63b40a-2c2a-4bd3-9d3f-c74d70286082","Type":"ContainerStarted","Data":"32dfb63503d99736fd10ac1fcfe6803feeae2661f5b6cb558876d87015ad4dd0"} Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.315509 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" podUID="9f63b40a-2c2a-4bd3-9d3f-c74d70286082" containerName="dnsmasq-dns" containerID="cri-o://32dfb63503d99736fd10ac1fcfe6803feeae2661f5b6cb558876d87015ad4dd0" gracePeriod=10 Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.315604 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.338717 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-577b88f67d-xws59" podStartSLOduration=3.338698218 podStartE2EDuration="3.338698218s" podCreationTimestamp="2025-12-01 18:54:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:56.328511763 +0000 UTC m=+1510.350141022" watchObservedRunningTime="2025-12-01 18:54:56.338698218 +0000 UTC m=+1510.360327477" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.368331 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" podStartSLOduration=3.368306876 podStartE2EDuration="3.368306876s" podCreationTimestamp="2025-12-01 18:54:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:56.3503751 +0000 UTC m=+1510.372004359" watchObservedRunningTime="2025-12-01 18:54:56.368306876 +0000 UTC m=+1510.389936135" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.527102 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7392ddad-969f-4a42-86a1-460f3ca2d500" path="/var/lib/kubelet/pods/7392ddad-969f-4a42-86a1-460f3ca2d500/volumes" Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.580215 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 18:54:56 crc kubenswrapper[4935]: W1201 18:54:56.581115 4935 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod247005f0_69f8_48a1_a8f9_2f6c45aca362.slice/crio-ed7fea9aeafd4c8c3ffbb8feb3557c6b5f9225e1a99369a8630bab43600c8b26 WatchSource:0}: Error finding container ed7fea9aeafd4c8c3ffbb8feb3557c6b5f9225e1a99369a8630bab43600c8b26: Status 404 returned error can't find the container with id ed7fea9aeafd4c8c3ffbb8feb3557c6b5f9225e1a99369a8630bab43600c8b26 Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.711422 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-d88l4"] Dec 01 18:54:56 crc kubenswrapper[4935]: I1201 18:54:56.955772 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.166234 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.327030 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"247005f0-69f8-48a1-a8f9-2f6c45aca362","Type":"ContainerStarted","Data":"ed7fea9aeafd4c8c3ffbb8feb3557c6b5f9225e1a99369a8630bab43600c8b26"} Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.328140 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" event={"ID":"693ddd06-a5c8-459d-8b68-d9e7a734809a","Type":"ContainerStarted","Data":"ae047209bf890e4c768519a3153c3c1642c762279f641e5214cda1b31bf2e11e"} Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.329800 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" event={"ID":"9f63b40a-2c2a-4bd3-9d3f-c74d70286082","Type":"ContainerDied","Data":"32dfb63503d99736fd10ac1fcfe6803feeae2661f5b6cb558876d87015ad4dd0"} Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.329937 4935 generic.go:334] "Generic (PLEG): container finished" podID="9f63b40a-2c2a-4bd3-9d3f-c74d70286082" containerID="32dfb63503d99736fd10ac1fcfe6803feeae2661f5b6cb558876d87015ad4dd0" exitCode=0 Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.330004 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" event={"ID":"9f63b40a-2c2a-4bd3-9d3f-c74d70286082","Type":"ContainerDied","Data":"d3ea61122137bb7db6ef08660054c9c1b0041f6adc69a428ac6e72b13518f942"} Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.330028 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3ea61122137bb7db6ef08660054c9c1b0041f6adc69a428ac6e72b13518f942" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.331360 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6c383708-490b-4f40-a370-bce6a8d57c18","Type":"ContainerStarted","Data":"d2df109cbdbf6b1dbece795953dfea2c79f9c4cc63b33a875fcbfdc68734b971"} Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.374158 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.508314 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-svc\") pod \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.508359 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pbg8\" (UniqueName: \"kubernetes.io/projected/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-kube-api-access-7pbg8\") pod \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.508416 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-swift-storage-0\") pod \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.508518 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-nb\") pod \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.508725 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-config\") pod \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.508779 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-sb\") pod \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\" (UID: \"9f63b40a-2c2a-4bd3-9d3f-c74d70286082\") " Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.519504 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-kube-api-access-7pbg8" (OuterVolumeSpecName: "kube-api-access-7pbg8") pod "9f63b40a-2c2a-4bd3-9d3f-c74d70286082" (UID: "9f63b40a-2c2a-4bd3-9d3f-c74d70286082"). InnerVolumeSpecName "kube-api-access-7pbg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.572544 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9f63b40a-2c2a-4bd3-9d3f-c74d70286082" (UID: "9f63b40a-2c2a-4bd3-9d3f-c74d70286082"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.590302 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9f63b40a-2c2a-4bd3-9d3f-c74d70286082" (UID: "9f63b40a-2c2a-4bd3-9d3f-c74d70286082"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.592809 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-config" (OuterVolumeSpecName: "config") pod "9f63b40a-2c2a-4bd3-9d3f-c74d70286082" (UID: "9f63b40a-2c2a-4bd3-9d3f-c74d70286082"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.599065 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f63b40a-2c2a-4bd3-9d3f-c74d70286082" (UID: "9f63b40a-2c2a-4bd3-9d3f-c74d70286082"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.606041 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9f63b40a-2c2a-4bd3-9d3f-c74d70286082" (UID: "9f63b40a-2c2a-4bd3-9d3f-c74d70286082"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.611069 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.611094 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.611106 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.611115 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.611124 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pbg8\" (UniqueName: \"kubernetes.io/projected/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-kube-api-access-7pbg8\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:57 crc kubenswrapper[4935]: I1201 18:54:57.611134 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f63b40a-2c2a-4bd3-9d3f-c74d70286082-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.344686 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerStarted","Data":"e98c3c2bfa7c474df1e852c9dcc5cec04f75d3d907e29f9bf56a63a2509eb9dd"} Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.347129 4935 generic.go:334] "Generic (PLEG): container finished" podID="693ddd06-a5c8-459d-8b68-d9e7a734809a" containerID="67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c" exitCode=0 Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 
18:54:58.347222 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" event={"ID":"693ddd06-a5c8-459d-8b68-d9e7a734809a","Type":"ContainerDied","Data":"67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c"} Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.348970 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" event={"ID":"4d980d57-2d10-4065-990f-e381180f2175","Type":"ContainerStarted","Data":"02f4a70f37bda250fd9478f20ebfa3088768db4224cabd2226d745925cd81b1c"} Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.351219 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b8c989c57-rgsh8" event={"ID":"05c203ba-25f0-4331-bf4f-19593176e6a1","Type":"ContainerStarted","Data":"c7e7432f7af1bb2e502ac6a4bfcaf46d28affeace2a2be0de40b76a01339041b"} Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.351287 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-jtlbc" Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.352378 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b8c989c57-rgsh8" event={"ID":"05c203ba-25f0-4331-bf4f-19593176e6a1","Type":"ContainerStarted","Data":"7d68bd4ad64226acdba510ace30c7a0b8263196cc07f678e554ab5a5525e8eb7"} Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.393160 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6b8c989c57-rgsh8" podStartSLOduration=2.163658764 podStartE2EDuration="5.393130941s" podCreationTimestamp="2025-12-01 18:54:53 +0000 UTC" firstStartedPulling="2025-12-01 18:54:54.556216115 +0000 UTC m=+1508.577845374" lastFinishedPulling="2025-12-01 18:54:57.785688292 +0000 UTC m=+1511.807317551" observedRunningTime="2025-12-01 18:54:58.391481021 +0000 UTC m=+1512.413110280" watchObservedRunningTime="2025-12-01 18:54:58.393130941 +0000 UTC m=+1512.414760200" Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.428965 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jtlbc"] Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.441330 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-jtlbc"] Dec 01 18:54:58 crc kubenswrapper[4935]: I1201 18:54:58.535929 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f63b40a-2c2a-4bd3-9d3f-c74d70286082" path="/var/lib/kubelet/pods/9f63b40a-2c2a-4bd3-9d3f-c74d70286082/volumes" Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.001821 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.065043 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.247081 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbbbd"] Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.367068 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"247005f0-69f8-48a1-a8f9-2f6c45aca362","Type":"ContainerStarted","Data":"52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda"} Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.369624 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" event={"ID":"693ddd06-a5c8-459d-8b68-d9e7a734809a","Type":"ContainerStarted","Data":"33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370"} Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.369776 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.374664 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" event={"ID":"4d980d57-2d10-4065-990f-e381180f2175","Type":"ContainerStarted","Data":"531cfe1be35cb338088819f784d78f1fa5dc2ceb1391d67bb3d422f624af2467"} Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.378649 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerStarted","Data":"d675c6c9d0dbd1ae89981a658cd0acf95b0ea7b1221e250627bcf21aa4713bd0"} Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.380785 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6c383708-490b-4f40-a370-bce6a8d57c18","Type":"ContainerStarted","Data":"22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd"} Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.392517 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" podStartSLOduration=4.39249983 podStartE2EDuration="4.39249983s" podCreationTimestamp="2025-12-01 18:54:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:54:59.385567516 +0000 UTC m=+1513.407196795" watchObservedRunningTime="2025-12-01 18:54:59.39249983 +0000 UTC m=+1513.414129089" Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.414442 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-684877c94b-g4nmw" podStartSLOduration=3.4907121500000002 podStartE2EDuration="6.414422119s" podCreationTimestamp="2025-12-01 18:54:53 +0000 UTC" firstStartedPulling="2025-12-01 18:54:54.862027134 +0000 UTC m=+1508.883656393" lastFinishedPulling="2025-12-01 18:54:57.785737093 +0000 UTC m=+1511.807366362" observedRunningTime="2025-12-01 18:54:59.408437834 +0000 UTC m=+1513.430067103" watchObservedRunningTime="2025-12-01 18:54:59.414422119 +0000 UTC m=+1513.436051378" Dec 01 18:54:59 crc kubenswrapper[4935]: I1201 18:54:59.744156 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.230356 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5f6b854f9d-n7chb"] Dec 01 18:55:00 crc kubenswrapper[4935]: E1201 18:55:00.231296 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f63b40a-2c2a-4bd3-9d3f-c74d70286082" containerName="dnsmasq-dns" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.231316 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f63b40a-2c2a-4bd3-9d3f-c74d70286082" containerName="dnsmasq-dns" Dec 01 18:55:00 crc kubenswrapper[4935]: E1201 18:55:00.231379 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f63b40a-2c2a-4bd3-9d3f-c74d70286082" containerName="init" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.231386 4935 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="9f63b40a-2c2a-4bd3-9d3f-c74d70286082" containerName="init" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.231590 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f63b40a-2c2a-4bd3-9d3f-c74d70286082" containerName="dnsmasq-dns" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.232927 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.237511 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.237778 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.267804 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5f6b854f9d-n7chb"] Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.277033 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-config-data-custom\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.277171 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-public-tls-certs\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.277230 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-config-data\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.277268 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ndlq\" (UniqueName: \"kubernetes.io/projected/0f1b009e-5300-416a-a397-79765cbcad0d-kube-api-access-4ndlq\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.277344 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f1b009e-5300-416a-a397-79765cbcad0d-logs\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.277365 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-combined-ca-bundle\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.277385 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-internal-tls-certs\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.379893 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-public-tls-certs\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.379967 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-config-data\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.380007 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ndlq\" (UniqueName: \"kubernetes.io/projected/0f1b009e-5300-416a-a397-79765cbcad0d-kube-api-access-4ndlq\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.380073 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f1b009e-5300-416a-a397-79765cbcad0d-logs\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.380095 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-combined-ca-bundle\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.380113 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-internal-tls-certs\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.380140 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-config-data-custom\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.380842 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f1b009e-5300-416a-a397-79765cbcad0d-logs\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.385482 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-combined-ca-bundle\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.400312 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-config-data\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.403686 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ndlq\" (UniqueName: \"kubernetes.io/projected/0f1b009e-5300-416a-a397-79765cbcad0d-kube-api-access-4ndlq\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.404444 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-public-tls-certs\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.404892 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-internal-tls-certs\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.406184 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0f1b009e-5300-416a-a397-79765cbcad0d-config-data-custom\") pod \"barbican-api-5f6b854f9d-n7chb\" (UID: \"0f1b009e-5300-416a-a397-79765cbcad0d\") " pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.407014 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerStarted","Data":"7c129fc653bcfcbcadc9cc03239139da234ba79e4d55b4db33f55849910b93ba"} Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.411657 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6c383708-490b-4f40-a370-bce6a8d57c18","Type":"ContainerStarted","Data":"9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162"} Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.411784 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.415649 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"247005f0-69f8-48a1-a8f9-2f6c45aca362","Type":"ContainerStarted","Data":"9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20"} Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.416263 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nbbbd" podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="registry-server" 
containerID="cri-o://aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c" gracePeriod=2 Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.439526 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.439505755 podStartE2EDuration="5.439505755s" podCreationTimestamp="2025-12-01 18:54:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:00.432162708 +0000 UTC m=+1514.453791967" watchObservedRunningTime="2025-12-01 18:55:00.439505755 +0000 UTC m=+1514.461135014" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.464454 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.930986014 podStartE2EDuration="5.464439358s" podCreationTimestamp="2025-12-01 18:54:55 +0000 UTC" firstStartedPulling="2025-12-01 18:54:56.599907355 +0000 UTC m=+1510.621536614" lastFinishedPulling="2025-12-01 18:54:58.133360699 +0000 UTC m=+1512.154989958" observedRunningTime="2025-12-01 18:55:00.45870461 +0000 UTC m=+1514.480333869" watchObservedRunningTime="2025-12-01 18:55:00.464439358 +0000 UTC m=+1514.486068617" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.667677 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:00 crc kubenswrapper[4935]: I1201 18:55:00.759804 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.046211 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.117040 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-catalog-content\") pod \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.117156 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-utilities\") pod \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.117187 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n9p4\" (UniqueName: \"kubernetes.io/projected/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-kube-api-access-6n9p4\") pod \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\" (UID: \"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f\") " Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.119433 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-utilities" (OuterVolumeSpecName: "utilities") pod "4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" (UID: "4d2c1d1a-fcb2-465a-a9f2-7439e891e19f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.134776 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-kube-api-access-6n9p4" (OuterVolumeSpecName: "kube-api-access-6n9p4") pod "4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" (UID: "4d2c1d1a-fcb2-465a-a9f2-7439e891e19f"). InnerVolumeSpecName "kube-api-access-6n9p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.219732 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.219768 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n9p4\" (UniqueName: \"kubernetes.io/projected/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-kube-api-access-6n9p4\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.285507 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" (UID: "4d2c1d1a-fcb2-465a-a9f2-7439e891e19f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.312539 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5f6b854f9d-n7chb"] Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.321703 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.434484 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerStarted","Data":"748ec26e4aaa6182245ba5d4dc47e97789f7241556bd02482bb191e56fab9b67"} Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.435490 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6b854f9d-n7chb" event={"ID":"0f1b009e-5300-416a-a397-79765cbcad0d","Type":"ContainerStarted","Data":"593b04c507fd2dc6879e6b93834720b968f6d08c0a20c9151ac627dab7e48778"} Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.438239 4935 generic.go:334] "Generic (PLEG): container finished" podID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerID="aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c" exitCode=0 Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.438924 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbbbd" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.439213 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbbbd" event={"ID":"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f","Type":"ContainerDied","Data":"aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c"} Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.439273 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbbbd" event={"ID":"4d2c1d1a-fcb2-465a-a9f2-7439e891e19f","Type":"ContainerDied","Data":"f8cc284f31f03d12dae19eff3bc93bd7b183e455df4e397d39c2c5add5163776"} Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.439307 4935 scope.go:117] "RemoveContainer" containerID="aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.439591 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6c383708-490b-4f40-a370-bce6a8d57c18" containerName="cinder-api-log" containerID="cri-o://22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd" gracePeriod=30 Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.439657 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6c383708-490b-4f40-a370-bce6a8d57c18" containerName="cinder-api" containerID="cri-o://9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162" gracePeriod=30 Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.481608 4935 scope.go:117] "RemoveContainer" containerID="072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.510375 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbbbd"] Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.529478 4935 scope.go:117] "RemoveContainer" containerID="9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.541000 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nbbbd"] Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.566443 4935 scope.go:117] "RemoveContainer" containerID="aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c" Dec 01 18:55:01 crc kubenswrapper[4935]: E1201 18:55:01.566862 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c\": container with ID starting with aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c not found: ID does not exist" containerID="aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.566905 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c"} err="failed to get container status \"aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c\": rpc error: code = NotFound desc = could not find container \"aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c\": container with ID starting with aa3ff824d527262d003d5e092b1360a26e74649a1613ad8ba8426b60df0f1b4c not found: ID does not exist" Dec 01 18:55:01 crc kubenswrapper[4935]: 
I1201 18:55:01.566932 4935 scope.go:117] "RemoveContainer" containerID="072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a" Dec 01 18:55:01 crc kubenswrapper[4935]: E1201 18:55:01.567350 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a\": container with ID starting with 072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a not found: ID does not exist" containerID="072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.567387 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a"} err="failed to get container status \"072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a\": rpc error: code = NotFound desc = could not find container \"072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a\": container with ID starting with 072a445725877b1568f887e71245dd31efaac281dc614a27dce1ab713666d72a not found: ID does not exist" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.567410 4935 scope.go:117] "RemoveContainer" containerID="9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5" Dec 01 18:55:01 crc kubenswrapper[4935]: E1201 18:55:01.567730 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5\": container with ID starting with 9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5 not found: ID does not exist" containerID="9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5" Dec 01 18:55:01 crc kubenswrapper[4935]: I1201 18:55:01.567772 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5"} err="failed to get container status \"9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5\": rpc error: code = NotFound desc = could not find container \"9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5\": container with ID starting with 9a4be17e31ab2b86f171947234b26fcbddef8ce0f6c66676b389ff8b360d1ca5 not found: ID does not exist" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.071060 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6f7cd766b5-766jz" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.073663 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.141695 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c383708-490b-4f40-a370-bce6a8d57c18-etc-machine-id\") pod \"6c383708-490b-4f40-a370-bce6a8d57c18\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.141770 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-scripts\") pod \"6c383708-490b-4f40-a370-bce6a8d57c18\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.141814 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-combined-ca-bundle\") pod \"6c383708-490b-4f40-a370-bce6a8d57c18\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.141849 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c383708-490b-4f40-a370-bce6a8d57c18-logs\") pod \"6c383708-490b-4f40-a370-bce6a8d57c18\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.141960 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data-custom\") pod \"6c383708-490b-4f40-a370-bce6a8d57c18\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.142101 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data\") pod \"6c383708-490b-4f40-a370-bce6a8d57c18\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.142132 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvn26\" (UniqueName: \"kubernetes.io/projected/6c383708-490b-4f40-a370-bce6a8d57c18-kube-api-access-lvn26\") pod \"6c383708-490b-4f40-a370-bce6a8d57c18\" (UID: \"6c383708-490b-4f40-a370-bce6a8d57c18\") " Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.143042 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c383708-490b-4f40-a370-bce6a8d57c18-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6c383708-490b-4f40-a370-bce6a8d57c18" (UID: "6c383708-490b-4f40-a370-bce6a8d57c18"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.156222 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c383708-490b-4f40-a370-bce6a8d57c18-logs" (OuterVolumeSpecName: "logs") pod "6c383708-490b-4f40-a370-bce6a8d57c18" (UID: "6c383708-490b-4f40-a370-bce6a8d57c18"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.159824 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-scripts" (OuterVolumeSpecName: "scripts") pod "6c383708-490b-4f40-a370-bce6a8d57c18" (UID: "6c383708-490b-4f40-a370-bce6a8d57c18"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.166321 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5f87769df8-nbb75"] Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.166727 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5f87769df8-nbb75" podUID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerName="neutron-httpd" containerID="cri-o://67d92002666cdf3baf0db8af5e52b65f139da2f7fb047074d000b28292c5bb78" gracePeriod=30 Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.166624 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5f87769df8-nbb75" podUID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerName="neutron-api" containerID="cri-o://aaec48903ccecffa0cded70c65e635a3cdf3d78a110bd64dc94c7f47f41b437c" gracePeriod=30 Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.170455 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6c383708-490b-4f40-a370-bce6a8d57c18" (UID: "6c383708-490b-4f40-a370-bce6a8d57c18"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.170563 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c383708-490b-4f40-a370-bce6a8d57c18-kube-api-access-lvn26" (OuterVolumeSpecName: "kube-api-access-lvn26") pod "6c383708-490b-4f40-a370-bce6a8d57c18" (UID: "6c383708-490b-4f40-a370-bce6a8d57c18"). InnerVolumeSpecName "kube-api-access-lvn26". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.186454 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c383708-490b-4f40-a370-bce6a8d57c18" (UID: "6c383708-490b-4f40-a370-bce6a8d57c18"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.249242 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvn26\" (UniqueName: \"kubernetes.io/projected/6c383708-490b-4f40-a370-bce6a8d57c18-kube-api-access-lvn26\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.249275 4935 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c383708-490b-4f40-a370-bce6a8d57c18-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.249285 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.249295 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.249304 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c383708-490b-4f40-a370-bce6a8d57c18-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.249312 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.286424 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data" (OuterVolumeSpecName: "config-data") pod "6c383708-490b-4f40-a370-bce6a8d57c18" (UID: "6c383708-490b-4f40-a370-bce6a8d57c18"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.351407 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c383708-490b-4f40-a370-bce6a8d57c18-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.449867 4935 generic.go:334] "Generic (PLEG): container finished" podID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerID="67d92002666cdf3baf0db8af5e52b65f139da2f7fb047074d000b28292c5bb78" exitCode=0 Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.449939 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f87769df8-nbb75" event={"ID":"c1728f1b-5640-4c6d-ba3c-c8096d0407c4","Type":"ContainerDied","Data":"67d92002666cdf3baf0db8af5e52b65f139da2f7fb047074d000b28292c5bb78"} Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.455634 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerStarted","Data":"7618d1579091493cc0c50cc0348fe443cede3aad9485f6fde10018b81ee2a2ab"} Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.455705 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.461345 4935 generic.go:334] "Generic (PLEG): container finished" podID="6c383708-490b-4f40-a370-bce6a8d57c18" containerID="9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162" exitCode=0 Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.461367 4935 generic.go:334] "Generic (PLEG): container finished" podID="6c383708-490b-4f40-a370-bce6a8d57c18" containerID="22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd" exitCode=143 Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.461404 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6c383708-490b-4f40-a370-bce6a8d57c18","Type":"ContainerDied","Data":"9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162"} Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.461444 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6c383708-490b-4f40-a370-bce6a8d57c18","Type":"ContainerDied","Data":"22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd"} Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.461457 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6c383708-490b-4f40-a370-bce6a8d57c18","Type":"ContainerDied","Data":"d2df109cbdbf6b1dbece795953dfea2c79f9c4cc63b33a875fcbfdc68734b971"} Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.461472 4935 scope.go:117] "RemoveContainer" containerID="9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.461533 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.463843 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6b854f9d-n7chb" event={"ID":"0f1b009e-5300-416a-a397-79765cbcad0d","Type":"ContainerStarted","Data":"955ffe4c3121741a392ea2fa7ff4dff73dcfaff91624da37efd3465210b511d1"} Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.463873 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6b854f9d-n7chb" event={"ID":"0f1b009e-5300-416a-a397-79765cbcad0d","Type":"ContainerStarted","Data":"73f663bfbf911eee488c314e63afaebf08c8ffe22f6955f8c540f99244a77c64"} Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.463994 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.485376 4935 scope.go:117] "RemoveContainer" containerID="22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.489288 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.665210395 podStartE2EDuration="7.489266654s" podCreationTimestamp="2025-12-01 18:54:55 +0000 UTC" firstStartedPulling="2025-12-01 18:54:57.755082083 +0000 UTC m=+1511.776711352" lastFinishedPulling="2025-12-01 18:55:01.579138352 +0000 UTC m=+1515.600767611" observedRunningTime="2025-12-01 18:55:02.481021888 +0000 UTC m=+1516.502651147" watchObservedRunningTime="2025-12-01 18:55:02.489266654 +0000 UTC m=+1516.510895913" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.505979 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5f6b854f9d-n7chb" podStartSLOduration=2.505963191 podStartE2EDuration="2.505963191s" podCreationTimestamp="2025-12-01 18:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:02.502212615 +0000 UTC m=+1516.523841864" watchObservedRunningTime="2025-12-01 18:55:02.505963191 +0000 UTC m=+1516.527592450" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.509665 4935 scope.go:117] "RemoveContainer" containerID="9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162" Dec 01 18:55:02 crc kubenswrapper[4935]: E1201 18:55:02.510177 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162\": container with ID starting with 9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162 not found: ID does not exist" containerID="9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.510212 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162"} err="failed to get container status \"9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162\": rpc error: code = NotFound desc = could not find container \"9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162\": container with ID starting with 9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162 not found: ID does not exist" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.510234 4935 scope.go:117] 
"RemoveContainer" containerID="22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd" Dec 01 18:55:02 crc kubenswrapper[4935]: E1201 18:55:02.510595 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd\": container with ID starting with 22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd not found: ID does not exist" containerID="22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.510642 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd"} err="failed to get container status \"22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd\": rpc error: code = NotFound desc = could not find container \"22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd\": container with ID starting with 22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd not found: ID does not exist" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.510672 4935 scope.go:117] "RemoveContainer" containerID="9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.510944 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162"} err="failed to get container status \"9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162\": rpc error: code = NotFound desc = could not find container \"9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162\": container with ID starting with 9b60fc57beca41da1e602cce2f75e519933a5f803167acef7fcb9230fd779162 not found: ID does not exist" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.510987 4935 scope.go:117] "RemoveContainer" containerID="22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.511322 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd"} err="failed to get container status \"22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd\": rpc error: code = NotFound desc = could not find container \"22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd\": container with ID starting with 22e884cde89b6c1f7bec9e510ecc193527ad51fb118604837dba27041a68aacd not found: ID does not exist" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.531076 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" path="/var/lib/kubelet/pods/4d2c1d1a-fcb2-465a-a9f2-7439e891e19f/volumes" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.537117 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.551250 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.560686 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 18:55:02 crc kubenswrapper[4935]: E1201 18:55:02.561192 4935 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="registry-server" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.561204 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="registry-server" Dec 01 18:55:02 crc kubenswrapper[4935]: E1201 18:55:02.561230 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="extract-utilities" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.561237 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="extract-utilities" Dec 01 18:55:02 crc kubenswrapper[4935]: E1201 18:55:02.561248 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c383708-490b-4f40-a370-bce6a8d57c18" containerName="cinder-api-log" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.561254 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c383708-490b-4f40-a370-bce6a8d57c18" containerName="cinder-api-log" Dec 01 18:55:02 crc kubenswrapper[4935]: E1201 18:55:02.561270 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="extract-content" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.561276 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="extract-content" Dec 01 18:55:02 crc kubenswrapper[4935]: E1201 18:55:02.561303 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c383708-490b-4f40-a370-bce6a8d57c18" containerName="cinder-api" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.561308 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c383708-490b-4f40-a370-bce6a8d57c18" containerName="cinder-api" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.561523 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d2c1d1a-fcb2-465a-a9f2-7439e891e19f" containerName="registry-server" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.561538 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c383708-490b-4f40-a370-bce6a8d57c18" containerName="cinder-api-log" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.561547 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c383708-490b-4f40-a370-bce6a8d57c18" containerName="cinder-api" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.563173 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.568503 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.569671 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.570023 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.570181 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.661612 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-logs\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.661652 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqpw6\" (UniqueName: \"kubernetes.io/projected/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-kube-api-access-dqpw6\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.661730 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.661831 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.661905 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-config-data-custom\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.661943 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.661962 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-config-data\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.662018 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-scripts\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.662057 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.774577 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.774672 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-config-data-custom\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.774712 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.774732 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-config-data\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.774809 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-scripts\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.774852 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.774887 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-logs\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.774912 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqpw6\" (UniqueName: \"kubernetes.io/projected/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-kube-api-access-dqpw6\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.774954 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.779874 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.784833 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.788892 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-config-data-custom\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.792676 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.793100 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-logs\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.794015 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.794541 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-scripts\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.801854 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-config-data\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.811634 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqpw6\" (UniqueName: \"kubernetes.io/projected/4ea7d483-2743-49a5-b6f0-a0e3355c2f2b-kube-api-access-dqpw6\") pod \"cinder-api-0\" (UID: \"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b\") " pod="openstack/cinder-api-0" Dec 01 18:55:02 crc kubenswrapper[4935]: I1201 18:55:02.962102 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 18:55:03 crc kubenswrapper[4935]: I1201 18:55:03.391698 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5d64f494d8-2clmq" Dec 01 18:55:03 crc kubenswrapper[4935]: I1201 18:55:03.481318 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:03 crc kubenswrapper[4935]: I1201 18:55:03.551133 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 18:55:04 crc kubenswrapper[4935]: I1201 18:55:04.535080 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c383708-490b-4f40-a370-bce6a8d57c18" path="/var/lib/kubelet/pods/6c383708-490b-4f40-a370-bce6a8d57c18/volumes" Dec 01 18:55:04 crc kubenswrapper[4935]: I1201 18:55:04.536666 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b","Type":"ContainerStarted","Data":"e10d1c73d5ea098bb5f099a80c8906929915b93ce27af3cffbb65801ebad0a9c"} Dec 01 18:55:04 crc kubenswrapper[4935]: I1201 18:55:04.536694 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b","Type":"ContainerStarted","Data":"1d25024522bb35b0839d1d59e22e2c94ff9e6c53b2d7f89fdc3d704febef4ad9"} Dec 01 18:55:05 crc kubenswrapper[4935]: I1201 18:55:05.525135 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4ea7d483-2743-49a5-b6f0-a0e3355c2f2b","Type":"ContainerStarted","Data":"2e4a50b0162200e247540df0e07454aada4b3315f052991e351c8c08bb2bd075"} Dec 01 18:55:05 crc kubenswrapper[4935]: I1201 18:55:05.525455 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 01 18:55:05 crc kubenswrapper[4935]: I1201 18:55:05.722380 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:55:05 crc kubenswrapper[4935]: I1201 18:55:05.732438 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:55:05 crc kubenswrapper[4935]: I1201 18:55:05.747754 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.7477367900000003 podStartE2EDuration="3.74773679s" podCreationTimestamp="2025-12-01 18:55:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:05.569762904 +0000 UTC m=+1519.591392193" watchObservedRunningTime="2025-12-01 18:55:05.74773679 +0000 UTC m=+1519.769366059" Dec 01 18:55:05 crc kubenswrapper[4935]: I1201 18:55:05.919057 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:55:05 crc kubenswrapper[4935]: I1201 18:55:05.993833 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-cv8dw"] Dec 01 18:55:05 crc kubenswrapper[4935]: I1201 18:55:05.994129 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" podUID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" containerName="dnsmasq-dns" containerID="cri-o://1091ecf6b8016ea3f882be3ebea831e46a5edbe036c7bd3c5a35435947a40379" gracePeriod=10 Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 
18:55:06.112803 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.160506 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.548723 4935 generic.go:334] "Generic (PLEG): container finished" podID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" containerID="1091ecf6b8016ea3f882be3ebea831e46a5edbe036c7bd3c5a35435947a40379" exitCode=0 Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.549959 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" event={"ID":"b830d4fa-99f6-4f0b-9220-75cf3170a78c","Type":"ContainerDied","Data":"1091ecf6b8016ea3f882be3ebea831e46a5edbe036c7bd3c5a35435947a40379"} Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.550721 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerName="cinder-scheduler" containerID="cri-o://52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda" gracePeriod=30 Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.551272 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerName="probe" containerID="cri-o://9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20" gracePeriod=30 Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.715172 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.827138 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-sb\") pod \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.827545 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbrs9\" (UniqueName: \"kubernetes.io/projected/b830d4fa-99f6-4f0b-9220-75cf3170a78c-kube-api-access-fbrs9\") pod \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.827573 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-nb\") pod \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.827632 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-svc\") pod \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.827715 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-config\") pod \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.827744 
4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-swift-storage-0\") pod \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\" (UID: \"b830d4fa-99f6-4f0b-9220-75cf3170a78c\") " Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.837505 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b830d4fa-99f6-4f0b-9220-75cf3170a78c-kube-api-access-fbrs9" (OuterVolumeSpecName: "kube-api-access-fbrs9") pod "b830d4fa-99f6-4f0b-9220-75cf3170a78c" (UID: "b830d4fa-99f6-4f0b-9220-75cf3170a78c"). InnerVolumeSpecName "kube-api-access-fbrs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.918710 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b830d4fa-99f6-4f0b-9220-75cf3170a78c" (UID: "b830d4fa-99f6-4f0b-9220-75cf3170a78c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.928684 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b830d4fa-99f6-4f0b-9220-75cf3170a78c" (UID: "b830d4fa-99f6-4f0b-9220-75cf3170a78c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.930422 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.930448 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbrs9\" (UniqueName: \"kubernetes.io/projected/b830d4fa-99f6-4f0b-9220-75cf3170a78c-kube-api-access-fbrs9\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.930460 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.935784 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b830d4fa-99f6-4f0b-9220-75cf3170a78c" (UID: "b830d4fa-99f6-4f0b-9220-75cf3170a78c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.989674 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b830d4fa-99f6-4f0b-9220-75cf3170a78c" (UID: "b830d4fa-99f6-4f0b-9220-75cf3170a78c"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:06 crc kubenswrapper[4935]: I1201 18:55:06.999240 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-config" (OuterVolumeSpecName: "config") pod "b830d4fa-99f6-4f0b-9220-75cf3170a78c" (UID: "b830d4fa-99f6-4f0b-9220-75cf3170a78c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.032994 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.033032 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.033044 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830d4fa-99f6-4f0b-9220-75cf3170a78c-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.586498 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" event={"ID":"b830d4fa-99f6-4f0b-9220-75cf3170a78c","Type":"ContainerDied","Data":"3b5dd279d772d8c8dd66877f11da4e9b264f1a5dd07f9641288782f4116ceeb7"} Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.586787 4935 scope.go:117] "RemoveContainer" containerID="1091ecf6b8016ea3f882be3ebea831e46a5edbe036c7bd3c5a35435947a40379" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.587105 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.595829 4935 generic.go:334] "Generic (PLEG): container finished" podID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerID="9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20" exitCode=0 Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.595952 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"247005f0-69f8-48a1-a8f9-2f6c45aca362","Type":"ContainerDied","Data":"9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20"} Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.606816 4935 generic.go:334] "Generic (PLEG): container finished" podID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerID="aaec48903ccecffa0cded70c65e635a3cdf3d78a110bd64dc94c7f47f41b437c" exitCode=0 Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.606873 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f87769df8-nbb75" event={"ID":"c1728f1b-5640-4c6d-ba3c-c8096d0407c4","Type":"ContainerDied","Data":"aaec48903ccecffa0cded70c65e635a3cdf3d78a110bd64dc94c7f47f41b437c"} Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.649605 4935 scope.go:117] "RemoveContainer" containerID="9a1b3a34a37f8006ce66a51e1e0660b6e6e87e48f0b31f9ab5a627fe132b3cb9" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.692574 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-cv8dw"] Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.705045 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-cv8dw"] Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.843455 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.857846 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-config\") pod \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.857922 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vhdz\" (UniqueName: \"kubernetes.io/projected/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-kube-api-access-6vhdz\") pod \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.857943 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-httpd-config\") pod \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.858051 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-ovndb-tls-certs\") pod \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.858542 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-combined-ca-bundle\") pod \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\" (UID: \"c1728f1b-5640-4c6d-ba3c-c8096d0407c4\") " Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.880349 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "c1728f1b-5640-4c6d-ba3c-c8096d0407c4" (UID: "c1728f1b-5640-4c6d-ba3c-c8096d0407c4"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.883575 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-kube-api-access-6vhdz" (OuterVolumeSpecName: "kube-api-access-6vhdz") pod "c1728f1b-5640-4c6d-ba3c-c8096d0407c4" (UID: "c1728f1b-5640-4c6d-ba3c-c8096d0407c4"). InnerVolumeSpecName "kube-api-access-6vhdz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.962363 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vhdz\" (UniqueName: \"kubernetes.io/projected/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-kube-api-access-6vhdz\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.962589 4935 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.969269 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c1728f1b-5640-4c6d-ba3c-c8096d0407c4" (UID: "c1728f1b-5640-4c6d-ba3c-c8096d0407c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.972457 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "c1728f1b-5640-4c6d-ba3c-c8096d0407c4" (UID: "c1728f1b-5640-4c6d-ba3c-c8096d0407c4"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:07 crc kubenswrapper[4935]: I1201 18:55:07.973585 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-config" (OuterVolumeSpecName: "config") pod "c1728f1b-5640-4c6d-ba3c-c8096d0407c4" (UID: "c1728f1b-5640-4c6d-ba3c-c8096d0407c4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.064398 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.064432 4935 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.064442 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1728f1b-5640-4c6d-ba3c-c8096d0407c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.238860 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.369728 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data\") pod \"247005f0-69f8-48a1-a8f9-2f6c45aca362\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.369779 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-combined-ca-bundle\") pod \"247005f0-69f8-48a1-a8f9-2f6c45aca362\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.369823 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-scripts\") pod \"247005f0-69f8-48a1-a8f9-2f6c45aca362\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.369864 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data-custom\") pod \"247005f0-69f8-48a1-a8f9-2f6c45aca362\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.369899 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/247005f0-69f8-48a1-a8f9-2f6c45aca362-etc-machine-id\") pod \"247005f0-69f8-48a1-a8f9-2f6c45aca362\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.370003 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwlq9\" (UniqueName: \"kubernetes.io/projected/247005f0-69f8-48a1-a8f9-2f6c45aca362-kube-api-access-dwlq9\") pod \"247005f0-69f8-48a1-a8f9-2f6c45aca362\" (UID: \"247005f0-69f8-48a1-a8f9-2f6c45aca362\") " Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.370135 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/247005f0-69f8-48a1-a8f9-2f6c45aca362-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "247005f0-69f8-48a1-a8f9-2f6c45aca362" (UID: "247005f0-69f8-48a1-a8f9-2f6c45aca362"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.370546 4935 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/247005f0-69f8-48a1-a8f9-2f6c45aca362-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.374125 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/247005f0-69f8-48a1-a8f9-2f6c45aca362-kube-api-access-dwlq9" (OuterVolumeSpecName: "kube-api-access-dwlq9") pod "247005f0-69f8-48a1-a8f9-2f6c45aca362" (UID: "247005f0-69f8-48a1-a8f9-2f6c45aca362"). InnerVolumeSpecName "kube-api-access-dwlq9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.383871 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "247005f0-69f8-48a1-a8f9-2f6c45aca362" (UID: "247005f0-69f8-48a1-a8f9-2f6c45aca362"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.384074 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-scripts" (OuterVolumeSpecName: "scripts") pod "247005f0-69f8-48a1-a8f9-2f6c45aca362" (UID: "247005f0-69f8-48a1-a8f9-2f6c45aca362"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.450541 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "247005f0-69f8-48a1-a8f9-2f6c45aca362" (UID: "247005f0-69f8-48a1-a8f9-2f6c45aca362"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.475439 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.475483 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.475495 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.475509 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwlq9\" (UniqueName: \"kubernetes.io/projected/247005f0-69f8-48a1-a8f9-2f6c45aca362-kube-api-access-dwlq9\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.525603 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" path="/var/lib/kubelet/pods/b830d4fa-99f6-4f0b-9220-75cf3170a78c/volumes" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.533230 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data" (OuterVolumeSpecName: "config-data") pod "247005f0-69f8-48a1-a8f9-2f6c45aca362" (UID: "247005f0-69f8-48a1-a8f9-2f6c45aca362"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.577120 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247005f0-69f8-48a1-a8f9-2f6c45aca362-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.619212 4935 generic.go:334] "Generic (PLEG): container finished" podID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerID="52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda" exitCode=0 Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.619289 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.619297 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"247005f0-69f8-48a1-a8f9-2f6c45aca362","Type":"ContainerDied","Data":"52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda"} Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.619443 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"247005f0-69f8-48a1-a8f9-2f6c45aca362","Type":"ContainerDied","Data":"ed7fea9aeafd4c8c3ffbb8feb3557c6b5f9225e1a99369a8630bab43600c8b26"} Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.619481 4935 scope.go:117] "RemoveContainer" containerID="9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.621463 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f87769df8-nbb75" event={"ID":"c1728f1b-5640-4c6d-ba3c-c8096d0407c4","Type":"ContainerDied","Data":"d124ba1cc6f694042eda249a1f7ee31e1254225bb4fbb353ba43854f7b34f901"} Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.621498 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5f87769df8-nbb75" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.642665 4935 scope.go:117] "RemoveContainer" containerID="52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.652110 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5f87769df8-nbb75"] Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.665534 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5f87769df8-nbb75"] Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.677328 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.684674 4935 scope.go:117] "RemoveContainer" containerID="9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20" Dec 01 18:55:08 crc kubenswrapper[4935]: E1201 18:55:08.685108 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20\": container with ID starting with 9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20 not found: ID does not exist" containerID="9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.685136 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20"} err="failed to get container status \"9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20\": rpc error: code = NotFound desc = could not find container \"9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20\": container with ID starting with 9462a31c3b07b65201b8250871b4a3d7c201769ec0e2217d0b69aa8208a0dc20 not found: ID does not exist" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.685172 4935 scope.go:117] "RemoveContainer" containerID="52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.687170 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 18:55:08 crc kubenswrapper[4935]: E1201 18:55:08.692308 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda\": container with ID starting with 52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda not found: ID does not exist" containerID="52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.692355 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda"} err="failed to get container status \"52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda\": rpc error: code = NotFound desc = could not find container \"52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda\": container with ID starting with 52895d5ad61c4acadc54ee9f44e03f933ea27908f56d827e641ee93b49c1edda not found: ID does not exist" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.692384 4935 scope.go:117] "RemoveContainer" containerID="67d92002666cdf3baf0db8af5e52b65f139da2f7fb047074d000b28292c5bb78" Dec 01 18:55:08 crc 
kubenswrapper[4935]: I1201 18:55:08.699213 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 18:55:08 crc kubenswrapper[4935]: E1201 18:55:08.699771 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerName="neutron-httpd" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.699786 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerName="neutron-httpd" Dec 01 18:55:08 crc kubenswrapper[4935]: E1201 18:55:08.699811 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerName="cinder-scheduler" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.699819 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerName="cinder-scheduler" Dec 01 18:55:08 crc kubenswrapper[4935]: E1201 18:55:08.699834 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerName="probe" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.699840 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerName="probe" Dec 01 18:55:08 crc kubenswrapper[4935]: E1201 18:55:08.699856 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" containerName="init" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.699862 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" containerName="init" Dec 01 18:55:08 crc kubenswrapper[4935]: E1201 18:55:08.699893 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerName="neutron-api" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.699898 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerName="neutron-api" Dec 01 18:55:08 crc kubenswrapper[4935]: E1201 18:55:08.699937 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" containerName="dnsmasq-dns" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.699946 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" containerName="dnsmasq-dns" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.700189 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerName="neutron-api" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.700211 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" containerName="neutron-httpd" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.700223 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerName="probe" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.700236 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" containerName="dnsmasq-dns" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.700245 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="247005f0-69f8-48a1-a8f9-2f6c45aca362" containerName="cinder-scheduler" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.701464 4935 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.707824 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.711290 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.732405 4935 scope.go:117] "RemoveContainer" containerID="aaec48903ccecffa0cded70c65e635a3cdf3d78a110bd64dc94c7f47f41b437c" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.883093 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-scripts\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.883165 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.883187 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.883262 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.883299 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6cz7\" (UniqueName: \"kubernetes.io/projected/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-kube-api-access-r6cz7\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.883402 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-config-data\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.985739 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.986195 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-config-data-custom\") pod 
\"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.986385 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.986468 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6cz7\" (UniqueName: \"kubernetes.io/projected/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-kube-api-access-r6cz7\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.986596 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-config-data\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.986725 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-scripts\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.987081 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.992239 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-config-data\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.992318 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-scripts\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.992681 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:08 crc kubenswrapper[4935]: I1201 18:55:08.992969 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:09 crc kubenswrapper[4935]: I1201 18:55:09.009008 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6cz7\" (UniqueName: 
\"kubernetes.io/projected/5f18d08d-0b37-4ae1-afac-5377ccb99cc2-kube-api-access-r6cz7\") pod \"cinder-scheduler-0\" (UID: \"5f18d08d-0b37-4ae1-afac-5377ccb99cc2\") " pod="openstack/cinder-scheduler-0" Dec 01 18:55:09 crc kubenswrapper[4935]: I1201 18:55:09.021535 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 18:55:09 crc kubenswrapper[4935]: I1201 18:55:09.606123 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 18:55:09 crc kubenswrapper[4935]: I1201 18:55:09.660654 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5f18d08d-0b37-4ae1-afac-5377ccb99cc2","Type":"ContainerStarted","Data":"06796731f3491bba3828206c0aae2d42310745f9533c0dbcd163dd19e12a6959"} Dec 01 18:55:10 crc kubenswrapper[4935]: I1201 18:55:10.303324 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6fb8649598-lsccp" Dec 01 18:55:10 crc kubenswrapper[4935]: I1201 18:55:10.525543 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="247005f0-69f8-48a1-a8f9-2f6c45aca362" path="/var/lib/kubelet/pods/247005f0-69f8-48a1-a8f9-2f6c45aca362/volumes" Dec 01 18:55:10 crc kubenswrapper[4935]: I1201 18:55:10.526179 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1728f1b-5640-4c6d-ba3c-c8096d0407c4" path="/var/lib/kubelet/pods/c1728f1b-5640-4c6d-ba3c-c8096d0407c4/volumes" Dec 01 18:55:10 crc kubenswrapper[4935]: I1201 18:55:10.700530 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5f18d08d-0b37-4ae1-afac-5377ccb99cc2","Type":"ContainerStarted","Data":"40a70ece3f3e12acf9fdab0aa993668563ae48d718a0ea0ab62f3a5c220ba98a"} Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.157196 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.158579 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.160742 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.161516 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.164015 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-9mqpd" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.175058 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.348432 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44f0cb67-763e-4db1-b920-6331dfb40ba3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.348614 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/44f0cb67-763e-4db1-b920-6331dfb40ba3-openstack-config\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.348648 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/44f0cb67-763e-4db1-b920-6331dfb40ba3-openstack-config-secret\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.348763 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjj4p\" (UniqueName: \"kubernetes.io/projected/44f0cb67-763e-4db1-b920-6331dfb40ba3-kube-api-access-rjj4p\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.359280 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b7b667979-cv8dw" podUID="b830d4fa-99f6-4f0b-9220-75cf3170a78c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.185:5353: i/o timeout" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.451452 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjj4p\" (UniqueName: \"kubernetes.io/projected/44f0cb67-763e-4db1-b920-6331dfb40ba3-kube-api-access-rjj4p\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.451588 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44f0cb67-763e-4db1-b920-6331dfb40ba3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.452766 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: 
\"kubernetes.io/configmap/44f0cb67-763e-4db1-b920-6331dfb40ba3-openstack-config\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.452811 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/44f0cb67-763e-4db1-b920-6331dfb40ba3-openstack-config-secret\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.453834 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/44f0cb67-763e-4db1-b920-6331dfb40ba3-openstack-config\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.460729 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/44f0cb67-763e-4db1-b920-6331dfb40ba3-openstack-config-secret\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.461462 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44f0cb67-763e-4db1-b920-6331dfb40ba3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.484022 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjj4p\" (UniqueName: \"kubernetes.io/projected/44f0cb67-763e-4db1-b920-6331dfb40ba3-kube-api-access-rjj4p\") pod \"openstackclient\" (UID: \"44f0cb67-763e-4db1-b920-6331dfb40ba3\") " pod="openstack/openstackclient" Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.722769 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5f18d08d-0b37-4ae1-afac-5377ccb99cc2","Type":"ContainerStarted","Data":"5b0ec34082872621050ba0f77f3c6ff834e6f81ed8f1159f8765ead9f16da5fb"} Dec 01 18:55:11 crc kubenswrapper[4935]: I1201 18:55:11.776975 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 01 18:55:12 crc kubenswrapper[4935]: I1201 18:55:12.314162 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:12 crc kubenswrapper[4935]: I1201 18:55:12.331088 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.33106663 podStartE2EDuration="4.33106663s" podCreationTimestamp="2025-12-01 18:55:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:11.761291619 +0000 UTC m=+1525.782920878" watchObservedRunningTime="2025-12-01 18:55:12.33106663 +0000 UTC m=+1526.352695899" Dec 01 18:55:12 crc kubenswrapper[4935]: I1201 18:55:12.338739 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 18:55:12 crc kubenswrapper[4935]: I1201 18:55:12.659820 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5f6b854f9d-n7chb" Dec 01 18:55:12 crc kubenswrapper[4935]: I1201 18:55:12.734785 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-577b88f67d-xws59"] Dec 01 18:55:12 crc kubenswrapper[4935]: I1201 18:55:12.735080 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-577b88f67d-xws59" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api-log" containerID="cri-o://ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a" gracePeriod=30 Dec 01 18:55:12 crc kubenswrapper[4935]: I1201 18:55:12.735573 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-577b88f67d-xws59" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api" containerID="cri-o://3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f" gracePeriod=30 Dec 01 18:55:12 crc kubenswrapper[4935]: I1201 18:55:12.756312 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"44f0cb67-763e-4db1-b920-6331dfb40ba3","Type":"ContainerStarted","Data":"af3bcbf1b812cf0aa94bf4c59bcb5d06083f730dca8e421c26c3f13fdcda0fc6"} Dec 01 18:55:13 crc kubenswrapper[4935]: I1201 18:55:13.793348 4935 generic.go:334] "Generic (PLEG): container finished" podID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerID="ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a" exitCode=143 Dec 01 18:55:13 crc kubenswrapper[4935]: I1201 18:55:13.793454 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-577b88f67d-xws59" event={"ID":"17a9b394-1d22-44af-86f2-0c98ac7b43e4","Type":"ContainerDied","Data":"ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a"} Dec 01 18:55:14 crc kubenswrapper[4935]: I1201 18:55:14.022264 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 01 18:55:14 crc kubenswrapper[4935]: I1201 18:55:14.871626 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 01 18:55:15 crc kubenswrapper[4935]: I1201 18:55:15.946522 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-577b88f67d-xws59" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.196:9311/healthcheck\": 
read tcp 10.217.0.2:59516->10.217.0.196:9311: read: connection reset by peer" Dec 01 18:55:15 crc kubenswrapper[4935]: I1201 18:55:15.946526 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-577b88f67d-xws59" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.196:9311/healthcheck\": read tcp 10.217.0.2:59520->10.217.0.196:9311: read: connection reset by peer" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.008046 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.008325 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="ceilometer-central-agent" containerID="cri-o://d675c6c9d0dbd1ae89981a658cd0acf95b0ea7b1221e250627bcf21aa4713bd0" gracePeriod=30 Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.008572 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="proxy-httpd" containerID="cri-o://7618d1579091493cc0c50cc0348fe443cede3aad9485f6fde10018b81ee2a2ab" gracePeriod=30 Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.008741 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="ceilometer-notification-agent" containerID="cri-o://7c129fc653bcfcbcadc9cc03239139da234ba79e4d55b4db33f55849910b93ba" gracePeriod=30 Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.008771 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="sg-core" containerID="cri-o://748ec26e4aaa6182245ba5d4dc47e97789f7241556bd02482bb191e56fab9b67" gracePeriod=30 Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.018110 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 18:55:16 crc kubenswrapper[4935]: E1201 18:55:16.290851 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc30a1f35_70d9_42c9_9bed_35ab762851ad.slice/crio-conmon-748ec26e4aaa6182245ba5d4dc47e97789f7241556bd02482bb191e56fab9b67.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc30a1f35_70d9_42c9_9bed_35ab762851ad.slice/crio-7618d1579091493cc0c50cc0348fe443cede3aad9485f6fde10018b81ee2a2ab.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.596119 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.723423 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data\") pod \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.723480 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data-custom\") pod \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.723665 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-combined-ca-bundle\") pod \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.723762 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a9b394-1d22-44af-86f2-0c98ac7b43e4-logs\") pod \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.723815 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzrz9\" (UniqueName: \"kubernetes.io/projected/17a9b394-1d22-44af-86f2-0c98ac7b43e4-kube-api-access-qzrz9\") pod \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\" (UID: \"17a9b394-1d22-44af-86f2-0c98ac7b43e4\") " Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.724325 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17a9b394-1d22-44af-86f2-0c98ac7b43e4-logs" (OuterVolumeSpecName: "logs") pod "17a9b394-1d22-44af-86f2-0c98ac7b43e4" (UID: "17a9b394-1d22-44af-86f2-0c98ac7b43e4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.729591 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "17a9b394-1d22-44af-86f2-0c98ac7b43e4" (UID: "17a9b394-1d22-44af-86f2-0c98ac7b43e4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.730672 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17a9b394-1d22-44af-86f2-0c98ac7b43e4-kube-api-access-qzrz9" (OuterVolumeSpecName: "kube-api-access-qzrz9") pod "17a9b394-1d22-44af-86f2-0c98ac7b43e4" (UID: "17a9b394-1d22-44af-86f2-0c98ac7b43e4"). InnerVolumeSpecName "kube-api-access-qzrz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.758857 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17a9b394-1d22-44af-86f2-0c98ac7b43e4" (UID: "17a9b394-1d22-44af-86f2-0c98ac7b43e4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.782141 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data" (OuterVolumeSpecName: "config-data") pod "17a9b394-1d22-44af-86f2-0c98ac7b43e4" (UID: "17a9b394-1d22-44af-86f2-0c98ac7b43e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.826724 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.826754 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.826763 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a9b394-1d22-44af-86f2-0c98ac7b43e4-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.826772 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzrz9\" (UniqueName: \"kubernetes.io/projected/17a9b394-1d22-44af-86f2-0c98ac7b43e4-kube-api-access-qzrz9\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.826782 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a9b394-1d22-44af-86f2-0c98ac7b43e4-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.836434 4935 generic.go:334] "Generic (PLEG): container finished" podID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerID="3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f" exitCode=0 Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.836506 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-577b88f67d-xws59" event={"ID":"17a9b394-1d22-44af-86f2-0c98ac7b43e4","Type":"ContainerDied","Data":"3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f"} Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.836511 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-577b88f67d-xws59" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.836537 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-577b88f67d-xws59" event={"ID":"17a9b394-1d22-44af-86f2-0c98ac7b43e4","Type":"ContainerDied","Data":"7238540402b3d75c3b73c9f2dcfe12553ce9204c00ab52b1434787e55b411f6b"} Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.836560 4935 scope.go:117] "RemoveContainer" containerID="3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.849014 4935 generic.go:334] "Generic (PLEG): container finished" podID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerID="7618d1579091493cc0c50cc0348fe443cede3aad9485f6fde10018b81ee2a2ab" exitCode=0 Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.849041 4935 generic.go:334] "Generic (PLEG): container finished" podID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerID="748ec26e4aaa6182245ba5d4dc47e97789f7241556bd02482bb191e56fab9b67" exitCode=2 Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.849049 4935 generic.go:334] "Generic (PLEG): container finished" podID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerID="d675c6c9d0dbd1ae89981a658cd0acf95b0ea7b1221e250627bcf21aa4713bd0" exitCode=0 Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.849070 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerDied","Data":"7618d1579091493cc0c50cc0348fe443cede3aad9485f6fde10018b81ee2a2ab"} Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.849095 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerDied","Data":"748ec26e4aaa6182245ba5d4dc47e97789f7241556bd02482bb191e56fab9b67"} Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.849106 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerDied","Data":"d675c6c9d0dbd1ae89981a658cd0acf95b0ea7b1221e250627bcf21aa4713bd0"} Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.863566 4935 scope.go:117] "RemoveContainer" containerID="ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.876545 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-577b88f67d-xws59"] Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.886636 4935 scope.go:117] "RemoveContainer" containerID="3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f" Dec 01 18:55:16 crc kubenswrapper[4935]: E1201 18:55:16.887075 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f\": container with ID starting with 3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f not found: ID does not exist" containerID="3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.887117 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f"} err="failed to get container status \"3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f\": rpc error: 
code = NotFound desc = could not find container \"3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f\": container with ID starting with 3611b55a4768eb7b1b80ea0af8caa391c733e54d9851dd4b0fb3e78e7c1a049f not found: ID does not exist" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.887159 4935 scope.go:117] "RemoveContainer" containerID="ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a" Dec 01 18:55:16 crc kubenswrapper[4935]: E1201 18:55:16.887768 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a\": container with ID starting with ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a not found: ID does not exist" containerID="ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.887804 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a"} err="failed to get container status \"ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a\": rpc error: code = NotFound desc = could not find container \"ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a\": container with ID starting with ea277e497d6351e79866fda37cc08f2c49389e6b572b0cb3f929da917a4fdf6a not found: ID does not exist" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.890228 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-577b88f67d-xws59"] Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.943733 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6b89d75d8c-8d6z5"] Dec 01 18:55:16 crc kubenswrapper[4935]: E1201 18:55:16.944201 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.944218 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api" Dec 01 18:55:16 crc kubenswrapper[4935]: E1201 18:55:16.944251 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api-log" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.944258 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api-log" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.944474 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.944500 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" containerName="barbican-api-log" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.945589 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.952458 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.952810 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.953299 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 18:55:16 crc kubenswrapper[4935]: I1201 18:55:16.961302 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6b89d75d8c-8d6z5"] Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.030944 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-config-data\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.031002 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brchp\" (UniqueName: \"kubernetes.io/projected/d97b7792-f596-4358-8b02-1ae1368ac68d-kube-api-access-brchp\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.031106 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d97b7792-f596-4358-8b02-1ae1368ac68d-etc-swift\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.031183 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-internal-tls-certs\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.031217 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-combined-ca-bundle\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.031235 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d97b7792-f596-4358-8b02-1ae1368ac68d-run-httpd\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.031260 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d97b7792-f596-4358-8b02-1ae1368ac68d-log-httpd\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " 
pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.031287 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-public-tls-certs\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.133116 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d97b7792-f596-4358-8b02-1ae1368ac68d-etc-swift\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.133525 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-internal-tls-certs\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.133568 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-combined-ca-bundle\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.133608 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d97b7792-f596-4358-8b02-1ae1368ac68d-run-httpd\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.133638 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d97b7792-f596-4358-8b02-1ae1368ac68d-log-httpd\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.133683 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-public-tls-certs\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.133765 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-config-data\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.133803 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brchp\" (UniqueName: \"kubernetes.io/projected/d97b7792-f596-4358-8b02-1ae1368ac68d-kube-api-access-brchp\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" 
Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.134277 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d97b7792-f596-4358-8b02-1ae1368ac68d-log-httpd\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.134313 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d97b7792-f596-4358-8b02-1ae1368ac68d-run-httpd\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.139799 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-public-tls-certs\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.140011 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-config-data\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.142021 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d97b7792-f596-4358-8b02-1ae1368ac68d-etc-swift\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.144864 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-internal-tls-certs\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.146414 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97b7792-f596-4358-8b02-1ae1368ac68d-combined-ca-bundle\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.151825 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brchp\" (UniqueName: \"kubernetes.io/projected/d97b7792-f596-4358-8b02-1ae1368ac68d-kube-api-access-brchp\") pod \"swift-proxy-6b89d75d8c-8d6z5\" (UID: \"d97b7792-f596-4358-8b02-1ae1368ac68d\") " pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.278016 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.874803 4935 generic.go:334] "Generic (PLEG): container finished" podID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerID="7c129fc653bcfcbcadc9cc03239139da234ba79e4d55b4db33f55849910b93ba" exitCode=0 Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.874891 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerDied","Data":"7c129fc653bcfcbcadc9cc03239139da234ba79e4d55b4db33f55849910b93ba"} Dec 01 18:55:17 crc kubenswrapper[4935]: I1201 18:55:17.909480 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6b89d75d8c-8d6z5"] Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.193976 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-7b798c567c-9slfq"] Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.200836 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.203383 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-tvgrx" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.203542 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.210142 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.219629 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7b798c567c-9slfq"] Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.274544 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxtmf\" (UniqueName: \"kubernetes.io/projected/67884ebb-46bf-417e-b499-776f74720d64-kube-api-access-sxtmf\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.274639 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data-custom\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.274684 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-combined-ca-bundle\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.274753 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.347634 4935 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/heat-cfnapi-5dc988865-sv2ln"] Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.349421 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.359615 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.370418 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-5dc988865-sv2ln"] Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.376718 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxtmf\" (UniqueName: \"kubernetes.io/projected/67884ebb-46bf-417e-b499-776f74720d64-kube-api-access-sxtmf\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.376807 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data-custom\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.376855 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-combined-ca-bundle\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.376926 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.388088 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data-custom\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.397910 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-combined-ca-bundle\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.409391 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.425257 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-tr78h"] Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.427017 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.427870 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxtmf\" (UniqueName: \"kubernetes.io/projected/67884ebb-46bf-417e-b499-776f74720d64-kube-api-access-sxtmf\") pod \"heat-engine-7b798c567c-9slfq\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.463505 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-tr78h"] Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.490998 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdddr\" (UniqueName: \"kubernetes.io/projected/750735d4-ef3a-4fad-b258-13bd36897efa-kube-api-access-rdddr\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.491409 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.491497 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data-custom\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.491524 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-combined-ca-bundle\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.538473 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.567956 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17a9b394-1d22-44af-86f2-0c98ac7b43e4" path="/var/lib/kubelet/pods/17a9b394-1d22-44af-86f2-0c98ac7b43e4/volumes" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.569408 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-75b65c59cf-g7mdb"] Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.570819 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-75b65c59cf-g7mdb"] Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.570970 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.573672 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.594882 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9wbk\" (UniqueName: \"kubernetes.io/projected/236b38ba-f435-4e71-9777-a15c545ce185-kube-api-access-d9wbk\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.594931 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data-custom\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.594979 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-combined-ca-bundle\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.595002 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdddr\" (UniqueName: \"kubernetes.io/projected/750735d4-ef3a-4fad-b258-13bd36897efa-kube-api-access-rdddr\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.595060 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-config\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.595104 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-nb\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.595154 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-sb\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.595217 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-svc\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.595241 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.595289 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-swift-storage-0\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.600568 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-combined-ca-bundle\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.602846 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data-custom\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.606263 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.623143 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdddr\" (UniqueName: \"kubernetes.io/projected/750735d4-ef3a-4fad-b258-13bd36897efa-kube-api-access-rdddr\") pod \"heat-cfnapi-5dc988865-sv2ln\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697553 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-combined-ca-bundle\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697601 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-nb\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697646 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-sb\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697704 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-svc\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697763 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-swift-storage-0\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697795 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9wbk\" (UniqueName: \"kubernetes.io/projected/236b38ba-f435-4e71-9777-a15c545ce185-kube-api-access-d9wbk\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697812 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data-custom\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697883 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-config\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697913 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.697933 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbbtr\" (UniqueName: \"kubernetes.io/projected/3394c6e9-bf83-4326-885f-484ebbce1d4d-kube-api-access-dbbtr\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.701203 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-sb\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.701739 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-svc\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.702253 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-swift-storage-0\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.703436 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-config\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.711803 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-nb\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.733177 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9wbk\" (UniqueName: \"kubernetes.io/projected/236b38ba-f435-4e71-9777-a15c545ce185-kube-api-access-d9wbk\") pod \"dnsmasq-dns-688b9f5b49-tr78h\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.799688 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data-custom\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.799799 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.799849 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbbtr\" (UniqueName: \"kubernetes.io/projected/3394c6e9-bf83-4326-885f-484ebbce1d4d-kube-api-access-dbbtr\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.799871 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-combined-ca-bundle\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.806773 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-combined-ca-bundle\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.807450 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data\") pod 
\"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.819508 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data-custom\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.820298 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.826095 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbbtr\" (UniqueName: \"kubernetes.io/projected/3394c6e9-bf83-4326-885f-484ebbce1d4d-kube-api-access-dbbtr\") pod \"heat-api-75b65c59cf-g7mdb\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.831712 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:18 crc kubenswrapper[4935]: I1201 18:55:18.993265 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:19 crc kubenswrapper[4935]: I1201 18:55:19.699601 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 01 18:55:22 crc kubenswrapper[4935]: I1201 18:55:22.220775 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:55:22 crc kubenswrapper[4935]: I1201 18:55:22.221640 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerName="glance-log" containerID="cri-o://da3dd5309546b5603d19f4c86102b686f99bf6418473290102ee266321691dde" gracePeriod=30 Dec 01 18:55:22 crc kubenswrapper[4935]: I1201 18:55:22.221807 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerName="glance-httpd" containerID="cri-o://bf23f543147880e332d6f460edcee9195c5886c4de4b7b1053a708774872c4e5" gracePeriod=30 Dec 01 18:55:22 crc kubenswrapper[4935]: I1201 18:55:22.940874 4935 generic.go:334] "Generic (PLEG): container finished" podID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerID="da3dd5309546b5603d19f4c86102b686f99bf6418473290102ee266321691dde" exitCode=143 Dec 01 18:55:22 crc kubenswrapper[4935]: I1201 18:55:22.940962 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d586b231-e06e-4111-8f29-0b8d7c12eccc","Type":"ContainerDied","Data":"da3dd5309546b5603d19f4c86102b686f99bf6418473290102ee266321691dde"} Dec 01 18:55:24 crc kubenswrapper[4935]: I1201 18:55:24.346716 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:55:24 crc kubenswrapper[4935]: I1201 18:55:24.347088 4935 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:55:25 crc kubenswrapper[4935]: W1201 18:55:25.259197 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd97b7792_f596_4358_8b02_1ae1368ac68d.slice/crio-1038bedf29ad3848591e29b584c11a0fef0b87fa711214853ca677dd5dcb4044 WatchSource:0}: Error finding container 1038bedf29ad3848591e29b584c11a0fef0b87fa711214853ca677dd5dcb4044: Status 404 returned error can't find the container with id 1038bedf29ad3848591e29b584c11a0fef0b87fa711214853ca677dd5dcb4044 Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.663633 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.757692 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-run-httpd\") pod \"c30a1f35-70d9-42c9-9bed-35ab762851ad\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.757744 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-scripts\") pod \"c30a1f35-70d9-42c9-9bed-35ab762851ad\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.757778 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-combined-ca-bundle\") pod \"c30a1f35-70d9-42c9-9bed-35ab762851ad\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.757817 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-config-data\") pod \"c30a1f35-70d9-42c9-9bed-35ab762851ad\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.757847 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-log-httpd\") pod \"c30a1f35-70d9-42c9-9bed-35ab762851ad\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.757955 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfvk\" (UniqueName: \"kubernetes.io/projected/c30a1f35-70d9-42c9-9bed-35ab762851ad-kube-api-access-9xfvk\") pod \"c30a1f35-70d9-42c9-9bed-35ab762851ad\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.757992 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-sg-core-conf-yaml\") pod \"c30a1f35-70d9-42c9-9bed-35ab762851ad\" (UID: \"c30a1f35-70d9-42c9-9bed-35ab762851ad\") " Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.758454 4935 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c30a1f35-70d9-42c9-9bed-35ab762851ad" (UID: "c30a1f35-70d9-42c9-9bed-35ab762851ad"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.758878 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c30a1f35-70d9-42c9-9bed-35ab762851ad" (UID: "c30a1f35-70d9-42c9-9bed-35ab762851ad"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.759077 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.759097 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30a1f35-70d9-42c9-9bed-35ab762851ad-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.763747 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-scripts" (OuterVolumeSpecName: "scripts") pod "c30a1f35-70d9-42c9-9bed-35ab762851ad" (UID: "c30a1f35-70d9-42c9-9bed-35ab762851ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.767404 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c30a1f35-70d9-42c9-9bed-35ab762851ad-kube-api-access-9xfvk" (OuterVolumeSpecName: "kube-api-access-9xfvk") pod "c30a1f35-70d9-42c9-9bed-35ab762851ad" (UID: "c30a1f35-70d9-42c9-9bed-35ab762851ad"). InnerVolumeSpecName "kube-api-access-9xfvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.796477 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c30a1f35-70d9-42c9-9bed-35ab762851ad" (UID: "c30a1f35-70d9-42c9-9bed-35ab762851ad"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.861218 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfvk\" (UniqueName: \"kubernetes.io/projected/c30a1f35-70d9-42c9-9bed-35ab762851ad-kube-api-access-9xfvk\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.861255 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.861265 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.922891 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c30a1f35-70d9-42c9-9bed-35ab762851ad" (UID: "c30a1f35-70d9-42c9-9bed-35ab762851ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.963825 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.998493 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-6674fffddb-zqfh2"] Dec 01 18:55:25 crc kubenswrapper[4935]: E1201 18:55:25.999036 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="ceilometer-notification-agent" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.999053 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="ceilometer-notification-agent" Dec 01 18:55:25 crc kubenswrapper[4935]: E1201 18:55:25.999087 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="proxy-httpd" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.999093 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="proxy-httpd" Dec 01 18:55:25 crc kubenswrapper[4935]: E1201 18:55:25.999104 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="ceilometer-central-agent" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.999111 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="ceilometer-central-agent" Dec 01 18:55:25 crc kubenswrapper[4935]: E1201 18:55:25.999125 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="sg-core" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.999131 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="sg-core" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.999327 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" 
containerName="ceilometer-notification-agent" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.999338 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="proxy-httpd" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.999349 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="sg-core" Dec 01 18:55:25 crc kubenswrapper[4935]: I1201 18:55:25.999363 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" containerName="ceilometer-central-agent" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.002423 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.014528 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-69cfbb4c64-24bqh"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.015940 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.028022 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-764bd4bf4b-gnb6w"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.029478 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.035412 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"44f0cb67-763e-4db1-b920-6331dfb40ba3","Type":"ContainerStarted","Data":"c28c7beefd59217ab3bdcaf2e1e36662a4912de71192b4cc838fbc1d0c19cec3"} Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.044588 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-69cfbb4c64-24bqh"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.053657 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" event={"ID":"d97b7792-f596-4358-8b02-1ae1368ac68d","Type":"ContainerStarted","Data":"ece1498750da04064ca6483581b2ba0eb6611fc6b404eed335f706106926eee0"} Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.053706 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" event={"ID":"d97b7792-f596-4358-8b02-1ae1368ac68d","Type":"ContainerStarted","Data":"1038bedf29ad3848591e29b584c11a0fef0b87fa711214853ca677dd5dcb4044"} Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.057057 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-config-data" (OuterVolumeSpecName: "config-data") pod "c30a1f35-70d9-42c9-9bed-35ab762851ad" (UID: "c30a1f35-70d9-42c9-9bed-35ab762851ad"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.059232 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-6674fffddb-zqfh2"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.065166 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.065206 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-combined-ca-bundle\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.065257 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.065277 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-combined-ca-bundle\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.067692 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.067852 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m597\" (UniqueName: \"kubernetes.io/projected/ce0e14b6-6e7f-467a-bad9-9479311d6c89-kube-api-access-6m597\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.067900 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9jlq\" (UniqueName: \"kubernetes.io/projected/b7f13e0e-b593-44c8-b015-3bb2d32b896a-kube-api-access-t9jlq\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.067943 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data-custom\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.067995 4935 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-combined-ca-bundle\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.068025 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data-custom\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.068079 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data-custom\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.068134 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7n2t\" (UniqueName: \"kubernetes.io/projected/2b1755ee-18df-46ef-be6d-b81b7967d831-kube-api-access-n7n2t\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.068257 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c30a1f35-70d9-42c9-9bed-35ab762851ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.073174 4935 generic.go:334] "Generic (PLEG): container finished" podID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerID="bf23f543147880e332d6f460edcee9195c5886c4de4b7b1053a708774872c4e5" exitCode=0 Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.073432 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d586b231-e06e-4111-8f29-0b8d7c12eccc","Type":"ContainerDied","Data":"bf23f543147880e332d6f460edcee9195c5886c4de4b7b1053a708774872c4e5"} Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.081937 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30a1f35-70d9-42c9-9bed-35ab762851ad","Type":"ContainerDied","Data":"e98c3c2bfa7c474df1e852c9dcc5cec04f75d3d907e29f9bf56a63a2509eb9dd"} Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.082002 4935 scope.go:117] "RemoveContainer" containerID="7618d1579091493cc0c50cc0348fe443cede3aad9485f6fde10018b81ee2a2ab" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.082220 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.086849 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-764bd4bf4b-gnb6w"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.144007 4935 scope.go:117] "RemoveContainer" containerID="748ec26e4aaa6182245ba5d4dc47e97789f7241556bd02482bb191e56fab9b67" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.155732 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.027970743 podStartE2EDuration="15.155710398s" podCreationTimestamp="2025-12-01 18:55:11 +0000 UTC" firstStartedPulling="2025-12-01 18:55:12.346989524 +0000 UTC m=+1526.368618793" lastFinishedPulling="2025-12-01 18:55:25.474729189 +0000 UTC m=+1539.496358448" observedRunningTime="2025-12-01 18:55:26.095520873 +0000 UTC m=+1540.117150142" watchObservedRunningTime="2025-12-01 18:55:26.155710398 +0000 UTC m=+1540.177339657" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170104 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170163 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-combined-ca-bundle\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170186 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170302 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m597\" (UniqueName: \"kubernetes.io/projected/ce0e14b6-6e7f-467a-bad9-9479311d6c89-kube-api-access-6m597\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170322 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9jlq\" (UniqueName: \"kubernetes.io/projected/b7f13e0e-b593-44c8-b015-3bb2d32b896a-kube-api-access-t9jlq\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170366 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data-custom\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170390 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-combined-ca-bundle\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170416 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data-custom\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170442 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data-custom\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170475 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7n2t\" (UniqueName: \"kubernetes.io/projected/2b1755ee-18df-46ef-be6d-b81b7967d831-kube-api-access-n7n2t\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170516 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.170541 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-combined-ca-bundle\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.171815 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.176481 4935 scope.go:117] "RemoveContainer" containerID="7c129fc653bcfcbcadc9cc03239139da234ba79e4d55b4db33f55849910b93ba" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.178501 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.179289 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-combined-ca-bundle\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.181675 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data-custom\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: 
\"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.188929 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.189937 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-combined-ca-bundle\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.193296 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.193591 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data-custom\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.194279 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-combined-ca-bundle\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.197025 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.197663 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9jlq\" (UniqueName: \"kubernetes.io/projected/b7f13e0e-b593-44c8-b015-3bb2d32b896a-kube-api-access-t9jlq\") pod \"heat-cfnapi-764bd4bf4b-gnb6w\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.200000 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data-custom\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.201855 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m597\" (UniqueName: \"kubernetes.io/projected/ce0e14b6-6e7f-467a-bad9-9479311d6c89-kube-api-access-6m597\") pod \"heat-engine-69cfbb4c64-24bqh\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.210330 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7n2t\" (UniqueName: 
\"kubernetes.io/projected/2b1755ee-18df-46ef-be6d-b81b7967d831-kube-api-access-n7n2t\") pod \"heat-api-6674fffddb-zqfh2\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.215202 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.218068 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.222507 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.222884 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.235260 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.236042 4935 scope.go:117] "RemoveContainer" containerID="d675c6c9d0dbd1ae89981a658cd0acf95b0ea7b1221e250627bcf21aa4713bd0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.273802 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-run-httpd\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.273876 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.273924 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.273942 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-scripts\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.273961 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-config-data\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.273985 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-log-httpd\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.274023 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-wl5g7\" (UniqueName: \"kubernetes.io/projected/53179c75-37bd-4459-bfc3-1afce65aee64-kube-api-access-wl5g7\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.365783 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.375570 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-run-httpd\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.375622 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.375652 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.375668 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-scripts\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.375688 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-config-data\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.375709 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-log-httpd\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.375744 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl5g7\" (UniqueName: \"kubernetes.io/projected/53179c75-37bd-4459-bfc3-1afce65aee64-kube-api-access-wl5g7\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.376402 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-run-httpd\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.381387 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.382525 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-log-httpd\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.384419 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.390038 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.399938 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl5g7\" (UniqueName: \"kubernetes.io/projected/53179c75-37bd-4459-bfc3-1afce65aee64-kube-api-access-wl5g7\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.406422 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-config-data\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.414342 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.417237 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-scripts\") pod \"ceilometer-0\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.449699 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-5dc988865-sv2ln"] Dec 01 18:55:26 crc kubenswrapper[4935]: W1201 18:55:26.485352 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750735d4_ef3a_4fad_b258_13bd36897efa.slice/crio-fe9e836fcbb6203998c7a679f458959bedf2591c74ee79b5cf392a8f43d56bf4 WatchSource:0}: Error finding container fe9e836fcbb6203998c7a679f458959bedf2591c74ee79b5cf392a8f43d56bf4: Status 404 returned error can't find the container with id fe9e836fcbb6203998c7a679f458959bedf2591c74ee79b5cf392a8f43d56bf4 Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.584124 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.697825 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c30a1f35-70d9-42c9-9bed-35ab762851ad" path="/var/lib/kubelet/pods/c30a1f35-70d9-42c9-9bed-35ab762851ad/volumes" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.699048 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.751849 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7b798c567c-9slfq"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.787389 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-tr78h"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.819259 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-75b65c59cf-g7mdb"] Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.821622 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-httpd-run\") pod \"d586b231-e06e-4111-8f29-0b8d7c12eccc\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.821666 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-logs\") pod \"d586b231-e06e-4111-8f29-0b8d7c12eccc\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.821731 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2svm\" (UniqueName: \"kubernetes.io/projected/d586b231-e06e-4111-8f29-0b8d7c12eccc-kube-api-access-l2svm\") pod \"d586b231-e06e-4111-8f29-0b8d7c12eccc\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.821763 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-combined-ca-bundle\") pod \"d586b231-e06e-4111-8f29-0b8d7c12eccc\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.821801 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"d586b231-e06e-4111-8f29-0b8d7c12eccc\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.821890 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-scripts\") pod \"d586b231-e06e-4111-8f29-0b8d7c12eccc\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.821928 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-internal-tls-certs\") pod \"d586b231-e06e-4111-8f29-0b8d7c12eccc\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.821957 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-config-data\") pod \"d586b231-e06e-4111-8f29-0b8d7c12eccc\" (UID: \"d586b231-e06e-4111-8f29-0b8d7c12eccc\") " Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.822033 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-httpd-run" 
(OuterVolumeSpecName: "httpd-run") pod "d586b231-e06e-4111-8f29-0b8d7c12eccc" (UID: "d586b231-e06e-4111-8f29-0b8d7c12eccc"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.823465 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-logs" (OuterVolumeSpecName: "logs") pod "d586b231-e06e-4111-8f29-0b8d7c12eccc" (UID: "d586b231-e06e-4111-8f29-0b8d7c12eccc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.823808 4935 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.823831 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d586b231-e06e-4111-8f29-0b8d7c12eccc-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.826434 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d586b231-e06e-4111-8f29-0b8d7c12eccc-kube-api-access-l2svm" (OuterVolumeSpecName: "kube-api-access-l2svm") pod "d586b231-e06e-4111-8f29-0b8d7c12eccc" (UID: "d586b231-e06e-4111-8f29-0b8d7c12eccc"). InnerVolumeSpecName "kube-api-access-l2svm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.834315 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "d586b231-e06e-4111-8f29-0b8d7c12eccc" (UID: "d586b231-e06e-4111-8f29-0b8d7c12eccc"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.836487 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-scripts" (OuterVolumeSpecName: "scripts") pod "d586b231-e06e-4111-8f29-0b8d7c12eccc" (UID: "d586b231-e06e-4111-8f29-0b8d7c12eccc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.878526 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d586b231-e06e-4111-8f29-0b8d7c12eccc" (UID: "d586b231-e06e-4111-8f29-0b8d7c12eccc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.906398 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-config-data" (OuterVolumeSpecName: "config-data") pod "d586b231-e06e-4111-8f29-0b8d7c12eccc" (UID: "d586b231-e06e-4111-8f29-0b8d7c12eccc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.927179 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2svm\" (UniqueName: \"kubernetes.io/projected/d586b231-e06e-4111-8f29-0b8d7c12eccc-kube-api-access-l2svm\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.927206 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.927260 4935 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.927274 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.927283 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.929627 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d586b231-e06e-4111-8f29-0b8d7c12eccc" (UID: "d586b231-e06e-4111-8f29-0b8d7c12eccc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:26 crc kubenswrapper[4935]: I1201 18:55:26.982417 4935 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.030107 4935 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.030131 4935 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d586b231-e06e-4111-8f29-0b8d7c12eccc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.107678 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" event={"ID":"236b38ba-f435-4e71-9777-a15c545ce185","Type":"ContainerStarted","Data":"0e5e5ba96cc7c7827546a129cda102572667611f48c550b87af8813c5c078496"} Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.107727 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" event={"ID":"236b38ba-f435-4e71-9777-a15c545ce185","Type":"ContainerStarted","Data":"31eac2740294742fd25d51740cd50a97bdda9d90b53aa28b8f3c15c450f9d0a7"} Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.126344 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-75b65c59cf-g7mdb" event={"ID":"3394c6e9-bf83-4326-885f-484ebbce1d4d","Type":"ContainerStarted","Data":"95cfa1e065a8e72ed565bdf8088540fa6e2d9810f9cc34ef42be10464ffa467b"} Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 
18:55:27.146696 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" event={"ID":"d97b7792-f596-4358-8b02-1ae1368ac68d","Type":"ContainerStarted","Data":"c616103e587de8b0877bce74610cef01af9529bac2f5819770b5ed0d0ec924cf"} Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.147407 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.147451 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.154028 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d586b231-e06e-4111-8f29-0b8d7c12eccc","Type":"ContainerDied","Data":"519bd4cbf19868850c86bf554e1d622bfd33819495879fb217e04c3296458509"} Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.154077 4935 scope.go:117] "RemoveContainer" containerID="bf23f543147880e332d6f460edcee9195c5886c4de4b7b1053a708774872c4e5" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.154278 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.164876 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7b798c567c-9slfq" event={"ID":"67884ebb-46bf-417e-b499-776f74720d64","Type":"ContainerStarted","Data":"0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8"} Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.164915 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7b798c567c-9slfq" event={"ID":"67884ebb-46bf-417e-b499-776f74720d64","Type":"ContainerStarted","Data":"73f7ac0269667294b97f2112dcc3b20b5329a96fad806ec0981852e419f2b7c4"} Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.165760 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.181258 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-5dc988865-sv2ln" event={"ID":"750735d4-ef3a-4fad-b258-13bd36897efa","Type":"ContainerStarted","Data":"fe9e836fcbb6203998c7a679f458959bedf2591c74ee79b5cf392a8f43d56bf4"} Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.181746 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" podStartSLOduration=11.181727143 podStartE2EDuration="11.181727143s" podCreationTimestamp="2025-12-01 18:55:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:27.166107988 +0000 UTC m=+1541.187737247" watchObservedRunningTime="2025-12-01 18:55:27.181727143 +0000 UTC m=+1541.203356402" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.217646 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.217985 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerName="glance-log" containerID="cri-o://32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e" gracePeriod=30 Dec 01 18:55:27 crc 
kubenswrapper[4935]: I1201 18:55:27.218598 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerName="glance-httpd" containerID="cri-o://bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462" gracePeriod=30 Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.221431 4935 scope.go:117] "RemoveContainer" containerID="da3dd5309546b5603d19f4c86102b686f99bf6418473290102ee266321691dde" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.222877 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-7b798c567c-9slfq" podStartSLOduration=9.222865368 podStartE2EDuration="9.222865368s" podCreationTimestamp="2025-12-01 18:55:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:27.194399295 +0000 UTC m=+1541.216028554" watchObservedRunningTime="2025-12-01 18:55:27.222865368 +0000 UTC m=+1541.244494627" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.250787 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.277922 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.295214 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:55:27 crc kubenswrapper[4935]: E1201 18:55:27.295741 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerName="glance-httpd" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.295754 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerName="glance-httpd" Dec 01 18:55:27 crc kubenswrapper[4935]: E1201 18:55:27.295808 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerName="glance-log" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.295817 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerName="glance-log" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.296010 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerName="glance-httpd" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.296020 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d586b231-e06e-4111-8f29-0b8d7c12eccc" containerName="glance-log" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.306793 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.313424 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.315653 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.315879 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.330231 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-6674fffddb-zqfh2"] Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.351712 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-764bd4bf4b-gnb6w"] Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.424240 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-69cfbb4c64-24bqh"] Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.451804 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.451858 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-logs\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.451942 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.451967 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.451985 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shc4b\" (UniqueName: \"kubernetes.io/projected/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-kube-api-access-shc4b\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.452012 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.452057 
4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.452126 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.460542 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:27 crc kubenswrapper[4935]: W1201 18:55:27.498500 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53179c75_37bd_4459_bfc3_1afce65aee64.slice/crio-15ae3289c29a2f7e1ff7225288bede3aee931170ce99cf9017c3ebc1dd878f53 WatchSource:0}: Error finding container 15ae3289c29a2f7e1ff7225288bede3aee931170ce99cf9017c3ebc1dd878f53: Status 404 returned error can't find the container with id 15ae3289c29a2f7e1ff7225288bede3aee931170ce99cf9017c3ebc1dd878f53 Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.553811 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.553903 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.553952 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.553977 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-logs\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.554045 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.554067 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.554085 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shc4b\" (UniqueName: \"kubernetes.io/projected/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-kube-api-access-shc4b\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.554113 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.554623 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.554770 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-logs\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.554944 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.559803 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.564882 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.570435 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.572840 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" 
(UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.580576 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shc4b\" (UniqueName: \"kubernetes.io/projected/3b9ac927-63ca-47c1-b78c-b93dae9abdb8-kube-api-access-shc4b\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.637033 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"3b9ac927-63ca-47c1-b78c-b93dae9abdb8\") " pod="openstack/glance-default-internal-api-0" Dec 01 18:55:27 crc kubenswrapper[4935]: I1201 18:55:27.881345 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.294064 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-69cfbb4c64-24bqh" event={"ID":"ce0e14b6-6e7f-467a-bad9-9479311d6c89","Type":"ContainerStarted","Data":"a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680"} Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.294599 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-69cfbb4c64-24bqh" event={"ID":"ce0e14b6-6e7f-467a-bad9-9479311d6c89","Type":"ContainerStarted","Data":"aa2a321367438e8c7f189af66a93deda58f1f2a6897407e214c4225649e51244"} Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.296934 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.340222 4935 generic.go:334] "Generic (PLEG): container finished" podID="236b38ba-f435-4e71-9777-a15c545ce185" containerID="0e5e5ba96cc7c7827546a129cda102572667611f48c550b87af8813c5c078496" exitCode=0 Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.340977 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" event={"ID":"236b38ba-f435-4e71-9777-a15c545ce185","Type":"ContainerDied","Data":"0e5e5ba96cc7c7827546a129cda102572667611f48c550b87af8813c5c078496"} Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.341015 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" event={"ID":"236b38ba-f435-4e71-9777-a15c545ce185","Type":"ContainerStarted","Data":"a2af2c640063e95aae37a7a579df7b5de4b16fa423d7522b4a0a09f5d754b14a"} Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.343254 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-69cfbb4c64-24bqh" podStartSLOduration=3.343230257 podStartE2EDuration="3.343230257s" podCreationTimestamp="2025-12-01 18:55:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:28.336469327 +0000 UTC m=+1542.358098596" watchObservedRunningTime="2025-12-01 18:55:28.343230257 +0000 UTC m=+1542.364859516" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.349225 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.357746 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6674fffddb-zqfh2" event={"ID":"2b1755ee-18df-46ef-be6d-b81b7967d831","Type":"ContainerStarted","Data":"082103566ad9323be7fec58e2865592286af1490c34f333110d47aae39dd45b7"} Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.379120 4935 generic.go:334] "Generic (PLEG): container finished" podID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerID="32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e" exitCode=143 Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.380872 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9727d7a-7291-4a7a-9398-dddf85dd8d38","Type":"ContainerDied","Data":"32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e"} Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.386620 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" event={"ID":"b7f13e0e-b593-44c8-b015-3bb2d32b896a","Type":"ContainerStarted","Data":"dc66717073546adec236e4b21f0e90fceda1e9cde856b5a26c7a321cf27fd215"} Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.395790 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerStarted","Data":"15ae3289c29a2f7e1ff7225288bede3aee931170ce99cf9017c3ebc1dd878f53"} Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.417597 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" podStartSLOduration=10.417576071 podStartE2EDuration="10.417576071s" podCreationTimestamp="2025-12-01 18:55:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:28.397136728 +0000 UTC m=+1542.418765987" watchObservedRunningTime="2025-12-01 18:55:28.417576071 +0000 UTC m=+1542.439205340" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.538815 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d586b231-e06e-4111-8f29-0b8d7c12eccc" path="/var/lib/kubelet/pods/d586b231-e06e-4111-8f29-0b8d7c12eccc/volumes" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.586947 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 18:55:28 crc kubenswrapper[4935]: W1201 18:55:28.595948 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b9ac927_63ca_47c1_b78c_b93dae9abdb8.slice/crio-120278352bd9c87b19ab54fd029ae02f9c87bfde8402e19eaa001012d37eb09b WatchSource:0}: Error finding container 120278352bd9c87b19ab54fd029ae02f9c87bfde8402e19eaa001012d37eb09b: Status 404 returned error can't find the container with id 120278352bd9c87b19ab54fd029ae02f9c87bfde8402e19eaa001012d37eb09b Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.776329 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-75b65c59cf-g7mdb"] Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.790814 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-5dc988865-sv2ln"] Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.846188 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-f77cd874b-zfsrx"] Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.848197 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.851639 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-internal-svc" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.851786 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-public-svc" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.861431 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-f77cd874b-zfsrx"] Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.882915 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-558b54d8fc-h7892"] Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.885520 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.887791 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-internal-svc" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.889996 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-public-svc" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.921290 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-558b54d8fc-h7892"] Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.928686 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c65ql\" (UniqueName: \"kubernetes.io/projected/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-kube-api-access-c65ql\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.928742 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-combined-ca-bundle\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.928788 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-internal-tls-certs\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.928844 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-public-tls-certs\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.928871 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:28 crc kubenswrapper[4935]: I1201 18:55:28.928908 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data-custom\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.032873 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxdzc\" (UniqueName: \"kubernetes.io/projected/49f9e82b-286f-42b5-b006-5fe38a758159-kube-api-access-qxdzc\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.032951 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.032980 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-public-tls-certs\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.033396 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.033459 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-public-tls-certs\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.033488 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-combined-ca-bundle\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.033521 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data-custom\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.034017 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c65ql\" (UniqueName: \"kubernetes.io/projected/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-kube-api-access-c65ql\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.034053 
4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data-custom\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.034097 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-combined-ca-bundle\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.034270 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-internal-tls-certs\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.034370 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-internal-tls-certs\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.041432 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.041866 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-public-tls-certs\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.042177 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-internal-tls-certs\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.043043 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-combined-ca-bundle\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.044665 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data-custom\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.054223 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-c65ql\" (UniqueName: \"kubernetes.io/projected/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-kube-api-access-c65ql\") pod \"heat-api-f77cd874b-zfsrx\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.137368 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data-custom\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.138040 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-internal-tls-certs\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.138130 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxdzc\" (UniqueName: \"kubernetes.io/projected/49f9e82b-286f-42b5-b006-5fe38a758159-kube-api-access-qxdzc\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.138275 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.138375 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-public-tls-certs\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.138436 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-combined-ca-bundle\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.142015 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data-custom\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.143939 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.148231 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-public-tls-certs\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.148377 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-internal-tls-certs\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.156058 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-combined-ca-bundle\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.158800 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxdzc\" (UniqueName: \"kubernetes.io/projected/49f9e82b-286f-42b5-b006-5fe38a758159-kube-api-access-qxdzc\") pod \"heat-cfnapi-558b54d8fc-h7892\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.186648 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.207402 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.410219 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3b9ac927-63ca-47c1-b78c-b93dae9abdb8","Type":"ContainerStarted","Data":"120278352bd9c87b19ab54fd029ae02f9c87bfde8402e19eaa001012d37eb09b"} Dec 01 18:55:29 crc kubenswrapper[4935]: I1201 18:55:29.412174 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerStarted","Data":"9f2acc5d1f0ca6836312288c97e25001e65b4e4ab92d22ff3019a8fde01203e8"} Dec 01 18:55:30 crc kubenswrapper[4935]: I1201 18:55:30.432909 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3b9ac927-63ca-47c1-b78c-b93dae9abdb8","Type":"ContainerStarted","Data":"2d64a6615836c8302bc6dff52be649bd7c918c6e96a53716d6f56938e98f4ee7"} Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.406830 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.507058 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" event={"ID":"b7f13e0e-b593-44c8-b015-3bb2d32b896a","Type":"ContainerStarted","Data":"68a605e5e36dadc5e347126b10b5ae05998586c18e6172767e3979b5d68aabe0"} Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.509122 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.542769 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-config-data\") pod \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.542928 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-logs\") pod \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.542969 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh474\" (UniqueName: \"kubernetes.io/projected/e9727d7a-7291-4a7a-9398-dddf85dd8d38-kube-api-access-bh474\") pod \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.543238 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.543289 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-combined-ca-bundle\") pod \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.543344 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-httpd-run\") pod \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.543381 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-scripts\") pod \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.543419 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-public-tls-certs\") pod \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\" (UID: \"e9727d7a-7291-4a7a-9398-dddf85dd8d38\") " Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.543889 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-logs" (OuterVolumeSpecName: 
"logs") pod "e9727d7a-7291-4a7a-9398-dddf85dd8d38" (UID: "e9727d7a-7291-4a7a-9398-dddf85dd8d38"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.544299 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e9727d7a-7291-4a7a-9398-dddf85dd8d38" (UID: "e9727d7a-7291-4a7a-9398-dddf85dd8d38"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.544822 4935 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.544836 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9727d7a-7291-4a7a-9398-dddf85dd8d38-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.568559 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9727d7a-7291-4a7a-9398-dddf85dd8d38-kube-api-access-bh474" (OuterVolumeSpecName: "kube-api-access-bh474") pod "e9727d7a-7291-4a7a-9398-dddf85dd8d38" (UID: "e9727d7a-7291-4a7a-9398-dddf85dd8d38"). InnerVolumeSpecName "kube-api-access-bh474". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.570456 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "e9727d7a-7291-4a7a-9398-dddf85dd8d38" (UID: "e9727d7a-7291-4a7a-9398-dddf85dd8d38"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.571833 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-api-75b65c59cf-g7mdb" podUID="3394c6e9-bf83-4326-885f-484ebbce1d4d" containerName="heat-api" containerID="cri-o://2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4" gracePeriod=60 Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.571994 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.576704 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-scripts" (OuterVolumeSpecName: "scripts") pod "e9727d7a-7291-4a7a-9398-dddf85dd8d38" (UID: "e9727d7a-7291-4a7a-9398-dddf85dd8d38"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.588263 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" podStartSLOduration=3.106096156 podStartE2EDuration="6.588242596s" podCreationTimestamp="2025-12-01 18:55:25 +0000 UTC" firstStartedPulling="2025-12-01 18:55:27.394193279 +0000 UTC m=+1541.415822538" lastFinishedPulling="2025-12-01 18:55:30.876339719 +0000 UTC m=+1544.897968978" observedRunningTime="2025-12-01 18:55:31.570440774 +0000 UTC m=+1545.592070033" watchObservedRunningTime="2025-12-01 18:55:31.588242596 +0000 UTC m=+1545.609871855" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.627966 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerStarted","Data":"7f451ca0d433d4929ea9339fdf33cd8eee9852ae3c51254352d35235bc5d48be"} Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.632697 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-5dc988865-sv2ln" event={"ID":"750735d4-ef3a-4fad-b258-13bd36897efa","Type":"ContainerStarted","Data":"681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5"} Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.633067 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-cfnapi-5dc988865-sv2ln" podUID="750735d4-ef3a-4fad-b258-13bd36897efa" containerName="heat-cfnapi" containerID="cri-o://681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5" gracePeriod=60 Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.633443 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.642284 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9727d7a-7291-4a7a-9398-dddf85dd8d38" (UID: "e9727d7a-7291-4a7a-9398-dddf85dd8d38"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.644040 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6674fffddb-zqfh2" event={"ID":"2b1755ee-18df-46ef-be6d-b81b7967d831","Type":"ContainerStarted","Data":"50f75d0c57a1f9f377523a2eeea6fb49154b7da7a336d3c42db1ba84c468e344"} Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.645102 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.651077 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-75b65c59cf-g7mdb" podStartSLOduration=9.561392761 podStartE2EDuration="13.651058313s" podCreationTimestamp="2025-12-01 18:55:18 +0000 UTC" firstStartedPulling="2025-12-01 18:55:26.784601733 +0000 UTC m=+1540.806230992" lastFinishedPulling="2025-12-01 18:55:30.874267285 +0000 UTC m=+1544.895896544" observedRunningTime="2025-12-01 18:55:31.624703637 +0000 UTC m=+1545.646332896" watchObservedRunningTime="2025-12-01 18:55:31.651058313 +0000 UTC m=+1545.672687572" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.651250 4935 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.651272 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.651285 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.651295 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh474\" (UniqueName: \"kubernetes.io/projected/e9727d7a-7291-4a7a-9398-dddf85dd8d38-kube-api-access-bh474\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.678962 4935 generic.go:334] "Generic (PLEG): container finished" podID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerID="bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462" exitCode=0 Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.679006 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9727d7a-7291-4a7a-9398-dddf85dd8d38","Type":"ContainerDied","Data":"bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462"} Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.679034 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9727d7a-7291-4a7a-9398-dddf85dd8d38","Type":"ContainerDied","Data":"3612ff548c1df4b50c4e6ba913b04d9616163d65cd15f62ca1e44a91b33d5e65"} Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.679051 4935 scope.go:117] "RemoveContainer" containerID="bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.679227 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.688933 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-5dc988865-sv2ln" podStartSLOduration=9.306069057 podStartE2EDuration="13.688909657s" podCreationTimestamp="2025-12-01 18:55:18 +0000 UTC" firstStartedPulling="2025-12-01 18:55:26.490760124 +0000 UTC m=+1540.512389383" lastFinishedPulling="2025-12-01 18:55:30.873600724 +0000 UTC m=+1544.895229983" observedRunningTime="2025-12-01 18:55:31.648750162 +0000 UTC m=+1545.670379421" watchObservedRunningTime="2025-12-01 18:55:31.688909657 +0000 UTC m=+1545.710538916" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.695368 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-6674fffddb-zqfh2" podStartSLOduration=3.128290035 podStartE2EDuration="6.695357147s" podCreationTimestamp="2025-12-01 18:55:25 +0000 UTC" firstStartedPulling="2025-12-01 18:55:27.306557062 +0000 UTC m=+1541.328186321" lastFinishedPulling="2025-12-01 18:55:30.873624174 +0000 UTC m=+1544.895253433" observedRunningTime="2025-12-01 18:55:31.666969397 +0000 UTC m=+1545.688598656" watchObservedRunningTime="2025-12-01 18:55:31.695357147 +0000 UTC m=+1545.716986406" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.707500 4935 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.737541 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-558b54d8fc-h7892"] Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.756942 4935 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.784763 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e9727d7a-7291-4a7a-9398-dddf85dd8d38" (UID: "e9727d7a-7291-4a7a-9398-dddf85dd8d38"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.790352 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-config-data" (OuterVolumeSpecName: "config-data") pod "e9727d7a-7291-4a7a-9398-dddf85dd8d38" (UID: "e9727d7a-7291-4a7a-9398-dddf85dd8d38"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.858934 4935 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.858975 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9727d7a-7291-4a7a-9398-dddf85dd8d38-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.871386 4935 scope.go:117] "RemoveContainer" containerID="32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e" Dec 01 18:55:31 crc kubenswrapper[4935]: I1201 18:55:31.882128 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-f77cd874b-zfsrx"] Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.201814 4935 scope.go:117] "RemoveContainer" containerID="bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462" Dec 01 18:55:32 crc kubenswrapper[4935]: E1201 18:55:32.203180 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462\": container with ID starting with bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462 not found: ID does not exist" containerID="bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.203208 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462"} err="failed to get container status \"bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462\": rpc error: code = NotFound desc = could not find container \"bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462\": container with ID starting with bf1a46b894937ea94e3405544c99c12f78a9869aff8512e2b54edf9ca9e05462 not found: ID does not exist" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.203229 4935 scope.go:117] "RemoveContainer" containerID="32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e" Dec 01 18:55:32 crc kubenswrapper[4935]: E1201 18:55:32.203809 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e\": container with ID starting with 32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e not found: ID does not exist" containerID="32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.203831 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e"} err="failed to get container status \"32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e\": rpc error: code = NotFound desc = could not find container \"32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e\": container with ID starting with 32027a8c777a3393338fb25e0d927ed32990f30dcb0032ac16c7f389122ccc6e not found: ID does not exist" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.205701 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-external-api-0"] Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.219723 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.229854 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:55:32 crc kubenswrapper[4935]: E1201 18:55:32.230388 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerName="glance-httpd" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.230403 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerName="glance-httpd" Dec 01 18:55:32 crc kubenswrapper[4935]: E1201 18:55:32.230424 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerName="glance-log" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.230430 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerName="glance-log" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.230636 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerName="glance-log" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.230661 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" containerName="glance-httpd" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.232074 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.239694 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.239919 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.275198 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.381237 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c06fe677-8ca0-4f63-ac6a-b83590981bca-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.381343 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.381515 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-scripts\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.381537 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c06fe677-8ca0-4f63-ac6a-b83590981bca-logs\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.381592 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.381634 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zrbc\" (UniqueName: \"kubernetes.io/projected/c06fe677-8ca0-4f63-ac6a-b83590981bca-kube-api-access-2zrbc\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.381661 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.381702 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-config-data\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.418609 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.459775 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.484559 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-scripts\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.484605 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c06fe677-8ca0-4f63-ac6a-b83590981bca-logs\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.484657 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.484690 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-2zrbc\" (UniqueName: \"kubernetes.io/projected/c06fe677-8ca0-4f63-ac6a-b83590981bca-kube-api-access-2zrbc\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.484712 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.484751 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-config-data\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.484848 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c06fe677-8ca0-4f63-ac6a-b83590981bca-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.484894 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.485753 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.485878 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c06fe677-8ca0-4f63-ac6a-b83590981bca-logs\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.486103 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c06fe677-8ca0-4f63-ac6a-b83590981bca-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.491402 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-config-data\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.491946 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.492239 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.492507 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c06fe677-8ca0-4f63-ac6a-b83590981bca-scripts\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.525951 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zrbc\" (UniqueName: \"kubernetes.io/projected/c06fe677-8ca0-4f63-ac6a-b83590981bca-kube-api-access-2zrbc\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.529606 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9727d7a-7291-4a7a-9398-dddf85dd8d38" path="/var/lib/kubelet/pods/e9727d7a-7291-4a7a-9398-dddf85dd8d38/volumes" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.538384 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"c06fe677-8ca0-4f63-ac6a-b83590981bca\") " pod="openstack/glance-default-external-api-0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.713767 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerStarted","Data":"428009dfcaa8251b3ed4f345e7562836efe53d977ea58004e312fd1f5686312b"} Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.719667 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-558b54d8fc-h7892" event={"ID":"49f9e82b-286f-42b5-b006-5fe38a758159","Type":"ContainerStarted","Data":"87eedf06818ffbee7cca3a4762e8e3abf4ca9b45d83938b0b4af5c2111cf6ab6"} Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.719710 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-558b54d8fc-h7892" event={"ID":"49f9e82b-286f-42b5-b006-5fe38a758159","Type":"ContainerStarted","Data":"e065b4250a439250a2e18b3f86cf4428ab104ba19e435ea7905ca56abd38308f"} Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.720134 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.726173 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3b9ac927-63ca-47c1-b78c-b93dae9abdb8","Type":"ContainerStarted","Data":"356979f27b9e462584e6dad15ce9e7a3625cd67e28a48d7cf357862c9911d389"} Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.737748 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-f77cd874b-zfsrx" 
event={"ID":"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa","Type":"ContainerStarted","Data":"48e6e8c486a39d490586dc18a525bd27d5957a99d7e1e8ee9b4281613c2a6d32"} Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.737812 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-f77cd874b-zfsrx" event={"ID":"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa","Type":"ContainerStarted","Data":"4e0522e890498c4bc375602de219f821322a04749380fc34680969e60e9d0dfb"} Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.738077 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.756663 4935 generic.go:334] "Generic (PLEG): container finished" podID="2b1755ee-18df-46ef-be6d-b81b7967d831" containerID="50f75d0c57a1f9f377523a2eeea6fb49154b7da7a336d3c42db1ba84c468e344" exitCode=1 Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.756796 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6674fffddb-zqfh2" event={"ID":"2b1755ee-18df-46ef-be6d-b81b7967d831","Type":"ContainerDied","Data":"50f75d0c57a1f9f377523a2eeea6fb49154b7da7a336d3c42db1ba84c468e344"} Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.758049 4935 scope.go:117] "RemoveContainer" containerID="50f75d0c57a1f9f377523a2eeea6fb49154b7da7a336d3c42db1ba84c468e344" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.784809 4935 generic.go:334] "Generic (PLEG): container finished" podID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" containerID="68a605e5e36dadc5e347126b10b5ae05998586c18e6172767e3979b5d68aabe0" exitCode=1 Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.784977 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" event={"ID":"b7f13e0e-b593-44c8-b015-3bb2d32b896a","Type":"ContainerDied","Data":"68a605e5e36dadc5e347126b10b5ae05998586c18e6172767e3979b5d68aabe0"} Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.786094 4935 scope.go:117] "RemoveContainer" containerID="68a605e5e36dadc5e347126b10b5ae05998586c18e6172767e3979b5d68aabe0" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.792762 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-75b65c59cf-g7mdb" event={"ID":"3394c6e9-bf83-4326-885f-484ebbce1d4d","Type":"ContainerStarted","Data":"2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4"} Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.812690 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-558b54d8fc-h7892" podStartSLOduration=4.812662302 podStartE2EDuration="4.812662302s" podCreationTimestamp="2025-12-01 18:55:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:32.758878924 +0000 UTC m=+1546.780508173" watchObservedRunningTime="2025-12-01 18:55:32.812662302 +0000 UTC m=+1546.834291561" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.847398 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.847368417 podStartE2EDuration="5.847368417s" podCreationTimestamp="2025-12-01 18:55:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:32.834621402 +0000 UTC m=+1546.856250661" watchObservedRunningTime="2025-12-01 18:55:32.847368417 
+0000 UTC m=+1546.868997676" Dec 01 18:55:32 crc kubenswrapper[4935]: I1201 18:55:32.868300 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 18:55:33 crc kubenswrapper[4935]: I1201 18:55:33.024273 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-f77cd874b-zfsrx" podStartSLOduration=5.02424795 podStartE2EDuration="5.02424795s" podCreationTimestamp="2025-12-01 18:55:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:32.894479797 +0000 UTC m=+1546.916109056" watchObservedRunningTime="2025-12-01 18:55:33.02424795 +0000 UTC m=+1547.045877209" Dec 01 18:55:33 crc kubenswrapper[4935]: I1201 18:55:33.806096 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6674fffddb-zqfh2" event={"ID":"2b1755ee-18df-46ef-be6d-b81b7967d831","Type":"ContainerStarted","Data":"c1b6dd7dbb2212d154227588ddb181af90caecc9fb9ec5ac8c3912353a2e68d9"} Dec 01 18:55:33 crc kubenswrapper[4935]: I1201 18:55:33.810368 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" event={"ID":"b7f13e0e-b593-44c8-b015-3bb2d32b896a","Type":"ContainerStarted","Data":"4fbe75b7036e8c9316858dda35aba29a6b0bd39097c9c79033a66bea3fc1bca8"} Dec 01 18:55:33 crc kubenswrapper[4935]: I1201 18:55:33.811257 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:33 crc kubenswrapper[4935]: I1201 18:55:33.820182 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 18:55:33 crc kubenswrapper[4935]: I1201 18:55:33.840556 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:55:33 crc kubenswrapper[4935]: I1201 18:55:33.978210 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-d88l4"] Dec 01 18:55:33 crc kubenswrapper[4935]: I1201 18:55:33.978447 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" podUID="693ddd06-a5c8-459d-8b68-d9e7a734809a" containerName="dnsmasq-dns" containerID="cri-o://33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370" gracePeriod=10 Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.687743 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.794519 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.802781 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-sb\") pod \"693ddd06-a5c8-459d-8b68-d9e7a734809a\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.802956 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-svc\") pod \"693ddd06-a5c8-459d-8b68-d9e7a734809a\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.802978 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-nb\") pod \"693ddd06-a5c8-459d-8b68-d9e7a734809a\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.803052 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-config\") pod \"693ddd06-a5c8-459d-8b68-d9e7a734809a\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.803095 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-swift-storage-0\") pod \"693ddd06-a5c8-459d-8b68-d9e7a734809a\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.803269 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r9ck\" (UniqueName: \"kubernetes.io/projected/693ddd06-a5c8-459d-8b68-d9e7a734809a-kube-api-access-6r9ck\") pod \"693ddd06-a5c8-459d-8b68-d9e7a734809a\" (UID: \"693ddd06-a5c8-459d-8b68-d9e7a734809a\") " Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.828920 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/693ddd06-a5c8-459d-8b68-d9e7a734809a-kube-api-access-6r9ck" (OuterVolumeSpecName: "kube-api-access-6r9ck") pod "693ddd06-a5c8-459d-8b68-d9e7a734809a" (UID: "693ddd06-a5c8-459d-8b68-d9e7a734809a"). InnerVolumeSpecName "kube-api-access-6r9ck". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.877993 4935 generic.go:334] "Generic (PLEG): container finished" podID="693ddd06-a5c8-459d-8b68-d9e7a734809a" containerID="33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370" exitCode=0 Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.878534 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" event={"ID":"693ddd06-a5c8-459d-8b68-d9e7a734809a","Type":"ContainerDied","Data":"33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370"} Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.878649 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" event={"ID":"693ddd06-a5c8-459d-8b68-d9e7a734809a","Type":"ContainerDied","Data":"ae047209bf890e4c768519a3153c3c1642c762279f641e5214cda1b31bf2e11e"} Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.878553 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-d88l4" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.878671 4935 scope.go:117] "RemoveContainer" containerID="33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.881081 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c06fe677-8ca0-4f63-ac6a-b83590981bca","Type":"ContainerStarted","Data":"eb49791b477fa2f87628a7e2493d56e706396d32049bf620cddfddfac649d996"} Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.881117 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c06fe677-8ca0-4f63-ac6a-b83590981bca","Type":"ContainerStarted","Data":"5013c8053ac9315660cd1715e7188d9a8d833a77c3d3376e0ea920759a99f9c1"} Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.885673 4935 generic.go:334] "Generic (PLEG): container finished" podID="2b1755ee-18df-46ef-be6d-b81b7967d831" containerID="c1b6dd7dbb2212d154227588ddb181af90caecc9fb9ec5ac8c3912353a2e68d9" exitCode=1 Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.885733 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6674fffddb-zqfh2" event={"ID":"2b1755ee-18df-46ef-be6d-b81b7967d831","Type":"ContainerDied","Data":"c1b6dd7dbb2212d154227588ddb181af90caecc9fb9ec5ac8c3912353a2e68d9"} Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.886315 4935 scope.go:117] "RemoveContainer" containerID="c1b6dd7dbb2212d154227588ddb181af90caecc9fb9ec5ac8c3912353a2e68d9" Dec 01 18:55:34 crc kubenswrapper[4935]: E1201 18:55:34.886630 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-6674fffddb-zqfh2_openstack(2b1755ee-18df-46ef-be6d-b81b7967d831)\"" pod="openstack/heat-api-6674fffddb-zqfh2" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.893283 4935 generic.go:334] "Generic (PLEG): container finished" podID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" containerID="4fbe75b7036e8c9316858dda35aba29a6b0bd39097c9c79033a66bea3fc1bca8" exitCode=1 Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.893320 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" 
event={"ID":"b7f13e0e-b593-44c8-b015-3bb2d32b896a","Type":"ContainerDied","Data":"4fbe75b7036e8c9316858dda35aba29a6b0bd39097c9c79033a66bea3fc1bca8"} Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.893871 4935 scope.go:117] "RemoveContainer" containerID="4fbe75b7036e8c9316858dda35aba29a6b0bd39097c9c79033a66bea3fc1bca8" Dec 01 18:55:34 crc kubenswrapper[4935]: E1201 18:55:34.894113 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-764bd4bf4b-gnb6w_openstack(b7f13e0e-b593-44c8-b015-3bb2d32b896a)\"" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.903400 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-config" (OuterVolumeSpecName: "config") pod "693ddd06-a5c8-459d-8b68-d9e7a734809a" (UID: "693ddd06-a5c8-459d-8b68-d9e7a734809a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.909516 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.909546 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r9ck\" (UniqueName: \"kubernetes.io/projected/693ddd06-a5c8-459d-8b68-d9e7a734809a-kube-api-access-6r9ck\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.916687 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "693ddd06-a5c8-459d-8b68-d9e7a734809a" (UID: "693ddd06-a5c8-459d-8b68-d9e7a734809a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.928619 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "693ddd06-a5c8-459d-8b68-d9e7a734809a" (UID: "693ddd06-a5c8-459d-8b68-d9e7a734809a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.966430 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "693ddd06-a5c8-459d-8b68-d9e7a734809a" (UID: "693ddd06-a5c8-459d-8b68-d9e7a734809a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:34 crc kubenswrapper[4935]: I1201 18:55:34.968943 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "693ddd06-a5c8-459d-8b68-d9e7a734809a" (UID: "693ddd06-a5c8-459d-8b68-d9e7a734809a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.011375 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.011411 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.011421 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.011431 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693ddd06-a5c8-459d-8b68-d9e7a734809a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.092938 4935 scope.go:117] "RemoveContainer" containerID="67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.133858 4935 scope.go:117] "RemoveContainer" containerID="33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370" Dec 01 18:55:35 crc kubenswrapper[4935]: E1201 18:55:35.137069 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370\": container with ID starting with 33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370 not found: ID does not exist" containerID="33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.137123 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370"} err="failed to get container status \"33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370\": rpc error: code = NotFound desc = could not find container \"33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370\": container with ID starting with 33c0abbb7a476d4388ac05e968ed5b306cd2bb060824d05c5e807f707b6ed370 not found: ID does not exist" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.137196 4935 scope.go:117] "RemoveContainer" containerID="67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c" Dec 01 18:55:35 crc kubenswrapper[4935]: E1201 18:55:35.138359 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c\": container with ID starting with 67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c not found: ID does not exist" containerID="67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.138412 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c"} err="failed to get container status \"67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c\": rpc error: code = NotFound desc = 
could not find container \"67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c\": container with ID starting with 67c9f2be32fcef81cef4f2e86e1d9b85b90c782d2ae2e7b8a50413862e3d1f3c not found: ID does not exist" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.138459 4935 scope.go:117] "RemoveContainer" containerID="50f75d0c57a1f9f377523a2eeea6fb49154b7da7a336d3c42db1ba84c468e344" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.219886 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-d88l4"] Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.229261 4935 scope.go:117] "RemoveContainer" containerID="68a605e5e36dadc5e347126b10b5ae05998586c18e6172767e3979b5d68aabe0" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.231075 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-d88l4"] Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.905738 4935 scope.go:117] "RemoveContainer" containerID="c1b6dd7dbb2212d154227588ddb181af90caecc9fb9ec5ac8c3912353a2e68d9" Dec 01 18:55:35 crc kubenswrapper[4935]: E1201 18:55:35.906393 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-6674fffddb-zqfh2_openstack(2b1755ee-18df-46ef-be6d-b81b7967d831)\"" pod="openstack/heat-api-6674fffddb-zqfh2" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.910589 4935 scope.go:117] "RemoveContainer" containerID="4fbe75b7036e8c9316858dda35aba29a6b0bd39097c9c79033a66bea3fc1bca8" Dec 01 18:55:35 crc kubenswrapper[4935]: E1201 18:55:35.910771 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-764bd4bf4b-gnb6w_openstack(b7f13e0e-b593-44c8-b015-3bb2d32b896a)\"" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.914270 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerStarted","Data":"3470612512fefd9d8596e2946ccaeb0e6efcf837b325fcb2d1673dd049863fbe"} Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.914385 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="ceilometer-central-agent" containerID="cri-o://9f2acc5d1f0ca6836312288c97e25001e65b4e4ab92d22ff3019a8fde01203e8" gracePeriod=30 Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.914461 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.914484 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="ceilometer-notification-agent" containerID="cri-o://7f451ca0d433d4929ea9339fdf33cd8eee9852ae3c51254352d35235bc5d48be" gracePeriod=30 Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.914503 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="sg-core" 
containerID="cri-o://428009dfcaa8251b3ed4f345e7562836efe53d977ea58004e312fd1f5686312b" gracePeriod=30 Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.914560 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="proxy-httpd" containerID="cri-o://3470612512fefd9d8596e2946ccaeb0e6efcf837b325fcb2d1673dd049863fbe" gracePeriod=30 Dec 01 18:55:35 crc kubenswrapper[4935]: I1201 18:55:35.970300 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c06fe677-8ca0-4f63-ac6a-b83590981bca","Type":"ContainerStarted","Data":"11295228538aae2b519de88457cb25864f8e793f6349f9733999686b55179ab9"} Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.015757 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.900949405 podStartE2EDuration="10.01573136s" podCreationTimestamp="2025-12-01 18:55:26 +0000 UTC" firstStartedPulling="2025-12-01 18:55:27.511454894 +0000 UTC m=+1541.533084153" lastFinishedPulling="2025-12-01 18:55:34.626236849 +0000 UTC m=+1548.647866108" observedRunningTime="2025-12-01 18:55:36.005795092 +0000 UTC m=+1550.027424351" watchObservedRunningTime="2025-12-01 18:55:36.01573136 +0000 UTC m=+1550.037360619" Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.040419 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.040403575 podStartE2EDuration="4.040403575s" podCreationTimestamp="2025-12-01 18:55:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:36.026027369 +0000 UTC m=+1550.047656628" watchObservedRunningTime="2025-12-01 18:55:36.040403575 +0000 UTC m=+1550.062032834" Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.367321 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.367378 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.415386 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.526347 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="693ddd06-a5c8-459d-8b68-d9e7a734809a" path="/var/lib/kubelet/pods/693ddd06-a5c8-459d-8b68-d9e7a734809a/volumes" Dec 01 18:55:36 crc kubenswrapper[4935]: E1201 18:55:36.711636 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53179c75_37bd_4459_bfc3_1afce65aee64.slice/crio-conmon-9f2acc5d1f0ca6836312288c97e25001e65b4e4ab92d22ff3019a8fde01203e8.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.983630 4935 generic.go:334] "Generic (PLEG): container finished" podID="53179c75-37bd-4459-bfc3-1afce65aee64" containerID="3470612512fefd9d8596e2946ccaeb0e6efcf837b325fcb2d1673dd049863fbe" exitCode=0 Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.983924 4935 generic.go:334] "Generic (PLEG): container finished" 
podID="53179c75-37bd-4459-bfc3-1afce65aee64" containerID="428009dfcaa8251b3ed4f345e7562836efe53d977ea58004e312fd1f5686312b" exitCode=2 Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.983932 4935 generic.go:334] "Generic (PLEG): container finished" podID="53179c75-37bd-4459-bfc3-1afce65aee64" containerID="7f451ca0d433d4929ea9339fdf33cd8eee9852ae3c51254352d35235bc5d48be" exitCode=0 Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.983939 4935 generic.go:334] "Generic (PLEG): container finished" podID="53179c75-37bd-4459-bfc3-1afce65aee64" containerID="9f2acc5d1f0ca6836312288c97e25001e65b4e4ab92d22ff3019a8fde01203e8" exitCode=0 Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.983721 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerDied","Data":"3470612512fefd9d8596e2946ccaeb0e6efcf837b325fcb2d1673dd049863fbe"} Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.984463 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerDied","Data":"428009dfcaa8251b3ed4f345e7562836efe53d977ea58004e312fd1f5686312b"} Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.984482 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerDied","Data":"7f451ca0d433d4929ea9339fdf33cd8eee9852ae3c51254352d35235bc5d48be"} Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.984496 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerDied","Data":"9f2acc5d1f0ca6836312288c97e25001e65b4e4ab92d22ff3019a8fde01203e8"} Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.984733 4935 scope.go:117] "RemoveContainer" containerID="4fbe75b7036e8c9316858dda35aba29a6b0bd39097c9c79033a66bea3fc1bca8" Dec 01 18:55:36 crc kubenswrapper[4935]: E1201 18:55:36.985048 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-764bd4bf4b-gnb6w_openstack(b7f13e0e-b593-44c8-b015-3bb2d32b896a)\"" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" Dec 01 18:55:36 crc kubenswrapper[4935]: I1201 18:55:36.985415 4935 scope.go:117] "RemoveContainer" containerID="c1b6dd7dbb2212d154227588ddb181af90caecc9fb9ec5ac8c3912353a2e68d9" Dec 01 18:55:36 crc kubenswrapper[4935]: E1201 18:55:36.985612 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-6674fffddb-zqfh2_openstack(2b1755ee-18df-46ef-be6d-b81b7967d831)\"" pod="openstack/heat-api-6674fffddb-zqfh2" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.098988 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-5chlp"] Dec 01 18:55:37 crc kubenswrapper[4935]: E1201 18:55:37.099465 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="693ddd06-a5c8-459d-8b68-d9e7a734809a" containerName="dnsmasq-dns" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.099481 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="693ddd06-a5c8-459d-8b68-d9e7a734809a" 
containerName="dnsmasq-dns" Dec 01 18:55:37 crc kubenswrapper[4935]: E1201 18:55:37.099516 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="693ddd06-a5c8-459d-8b68-d9e7a734809a" containerName="init" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.099523 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="693ddd06-a5c8-459d-8b68-d9e7a734809a" containerName="init" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.099739 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="693ddd06-a5c8-459d-8b68-d9e7a734809a" containerName="dnsmasq-dns" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.100484 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.116247 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5chlp"] Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.124646 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.178024 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-log-httpd\") pod \"53179c75-37bd-4459-bfc3-1afce65aee64\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.178179 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-run-httpd\") pod \"53179c75-37bd-4459-bfc3-1afce65aee64\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.178601 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "53179c75-37bd-4459-bfc3-1afce65aee64" (UID: "53179c75-37bd-4459-bfc3-1afce65aee64"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.178827 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "53179c75-37bd-4459-bfc3-1afce65aee64" (UID: "53179c75-37bd-4459-bfc3-1afce65aee64"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.178903 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-scripts\") pod \"53179c75-37bd-4459-bfc3-1afce65aee64\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.179037 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-combined-ca-bundle\") pod \"53179c75-37bd-4459-bfc3-1afce65aee64\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.179123 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-sg-core-conf-yaml\") pod \"53179c75-37bd-4459-bfc3-1afce65aee64\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.179192 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wl5g7\" (UniqueName: \"kubernetes.io/projected/53179c75-37bd-4459-bfc3-1afce65aee64-kube-api-access-wl5g7\") pod \"53179c75-37bd-4459-bfc3-1afce65aee64\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.179236 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-config-data\") pod \"53179c75-37bd-4459-bfc3-1afce65aee64\" (UID: \"53179c75-37bd-4459-bfc3-1afce65aee64\") " Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.179813 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f74b49-9135-49c6-af13-5107b45a5dd3-operator-scripts\") pod \"nova-api-db-create-5chlp\" (UID: \"65f74b49-9135-49c6-af13-5107b45a5dd3\") " pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.180598 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4p24\" (UniqueName: \"kubernetes.io/projected/65f74b49-9135-49c6-af13-5107b45a5dd3-kube-api-access-p4p24\") pod \"nova-api-db-create-5chlp\" (UID: \"65f74b49-9135-49c6-af13-5107b45a5dd3\") " pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.180681 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.180696 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53179c75-37bd-4459-bfc3-1afce65aee64-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.214184 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-qcl98"] Dec 01 18:55:37 crc kubenswrapper[4935]: E1201 18:55:37.215004 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="sg-core" Dec 01 18:55:37 crc kubenswrapper[4935]: 
I1201 18:55:37.215088 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="sg-core" Dec 01 18:55:37 crc kubenswrapper[4935]: E1201 18:55:37.215194 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="ceilometer-central-agent" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.215252 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="ceilometer-central-agent" Dec 01 18:55:37 crc kubenswrapper[4935]: E1201 18:55:37.215323 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="ceilometer-notification-agent" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.215376 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="ceilometer-notification-agent" Dec 01 18:55:37 crc kubenswrapper[4935]: E1201 18:55:37.215434 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="proxy-httpd" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.215492 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="proxy-httpd" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.215765 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="proxy-httpd" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.215838 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="ceilometer-central-agent" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.215904 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="sg-core" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.215974 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" containerName="ceilometer-notification-agent" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.216780 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.224925 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53179c75-37bd-4459-bfc3-1afce65aee64-kube-api-access-wl5g7" (OuterVolumeSpecName: "kube-api-access-wl5g7") pod "53179c75-37bd-4459-bfc3-1afce65aee64" (UID: "53179c75-37bd-4459-bfc3-1afce65aee64"). InnerVolumeSpecName "kube-api-access-wl5g7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.225016 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-scripts" (OuterVolumeSpecName: "scripts") pod "53179c75-37bd-4459-bfc3-1afce65aee64" (UID: "53179c75-37bd-4459-bfc3-1afce65aee64"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.258176 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-5142-account-create-update-nhlr5"] Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.259696 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.262039 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.278824 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-qcl98"] Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.282411 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8ca764e-f73d-424c-ac48-60af2d5729d2-operator-scripts\") pod \"nova-cell0-db-create-qcl98\" (UID: \"a8ca764e-f73d-424c-ac48-60af2d5729d2\") " pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.282485 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f74b49-9135-49c6-af13-5107b45a5dd3-operator-scripts\") pod \"nova-api-db-create-5chlp\" (UID: \"65f74b49-9135-49c6-af13-5107b45a5dd3\") " pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.282628 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4p24\" (UniqueName: \"kubernetes.io/projected/65f74b49-9135-49c6-af13-5107b45a5dd3-kube-api-access-p4p24\") pod \"nova-api-db-create-5chlp\" (UID: \"65f74b49-9135-49c6-af13-5107b45a5dd3\") " pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.282681 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkgvz\" (UniqueName: \"kubernetes.io/projected/a8ca764e-f73d-424c-ac48-60af2d5729d2-kube-api-access-lkgvz\") pod \"nova-cell0-db-create-qcl98\" (UID: \"a8ca764e-f73d-424c-ac48-60af2d5729d2\") " pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.282717 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qfzf\" (UniqueName: \"kubernetes.io/projected/e0b1e7a8-88dd-4890-8971-eec026c2d209-kube-api-access-4qfzf\") pod \"nova-api-5142-account-create-update-nhlr5\" (UID: \"e0b1e7a8-88dd-4890-8971-eec026c2d209\") " pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.282775 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b1e7a8-88dd-4890-8971-eec026c2d209-operator-scripts\") pod \"nova-api-5142-account-create-update-nhlr5\" (UID: \"e0b1e7a8-88dd-4890-8971-eec026c2d209\") " pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.283129 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.283175 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wl5g7\" (UniqueName: \"kubernetes.io/projected/53179c75-37bd-4459-bfc3-1afce65aee64-kube-api-access-wl5g7\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.283278 4935 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f74b49-9135-49c6-af13-5107b45a5dd3-operator-scripts\") pod \"nova-api-db-create-5chlp\" (UID: \"65f74b49-9135-49c6-af13-5107b45a5dd3\") " pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.294363 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5142-account-create-update-nhlr5"] Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.304000 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "53179c75-37bd-4459-bfc3-1afce65aee64" (UID: "53179c75-37bd-4459-bfc3-1afce65aee64"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.308115 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4p24\" (UniqueName: \"kubernetes.io/projected/65f74b49-9135-49c6-af13-5107b45a5dd3-kube-api-access-p4p24\") pod \"nova-api-db-create-5chlp\" (UID: \"65f74b49-9135-49c6-af13-5107b45a5dd3\") " pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.388059 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b1e7a8-88dd-4890-8971-eec026c2d209-operator-scripts\") pod \"nova-api-5142-account-create-update-nhlr5\" (UID: \"e0b1e7a8-88dd-4890-8971-eec026c2d209\") " pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.388171 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8ca764e-f73d-424c-ac48-60af2d5729d2-operator-scripts\") pod \"nova-cell0-db-create-qcl98\" (UID: \"a8ca764e-f73d-424c-ac48-60af2d5729d2\") " pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.388355 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkgvz\" (UniqueName: \"kubernetes.io/projected/a8ca764e-f73d-424c-ac48-60af2d5729d2-kube-api-access-lkgvz\") pod \"nova-cell0-db-create-qcl98\" (UID: \"a8ca764e-f73d-424c-ac48-60af2d5729d2\") " pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.388400 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qfzf\" (UniqueName: \"kubernetes.io/projected/e0b1e7a8-88dd-4890-8971-eec026c2d209-kube-api-access-4qfzf\") pod \"nova-api-5142-account-create-update-nhlr5\" (UID: \"e0b1e7a8-88dd-4890-8971-eec026c2d209\") " pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.388474 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.389863 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8ca764e-f73d-424c-ac48-60af2d5729d2-operator-scripts\") pod \"nova-cell0-db-create-qcl98\" (UID: \"a8ca764e-f73d-424c-ac48-60af2d5729d2\") " 
pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.390679 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-config-data" (OuterVolumeSpecName: "config-data") pod "53179c75-37bd-4459-bfc3-1afce65aee64" (UID: "53179c75-37bd-4459-bfc3-1afce65aee64"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.394191 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b1e7a8-88dd-4890-8971-eec026c2d209-operator-scripts\") pod \"nova-api-5142-account-create-update-nhlr5\" (UID: \"e0b1e7a8-88dd-4890-8971-eec026c2d209\") " pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.413288 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qfzf\" (UniqueName: \"kubernetes.io/projected/e0b1e7a8-88dd-4890-8971-eec026c2d209-kube-api-access-4qfzf\") pod \"nova-api-5142-account-create-update-nhlr5\" (UID: \"e0b1e7a8-88dd-4890-8971-eec026c2d209\") " pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.418998 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53179c75-37bd-4459-bfc3-1afce65aee64" (UID: "53179c75-37bd-4459-bfc3-1afce65aee64"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.419826 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.420248 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-gnbwb"] Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.430261 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkgvz\" (UniqueName: \"kubernetes.io/projected/a8ca764e-f73d-424c-ac48-60af2d5729d2-kube-api-access-lkgvz\") pod \"nova-cell0-db-create-qcl98\" (UID: \"a8ca764e-f73d-424c-ac48-60af2d5729d2\") " pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.431117 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.435598 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-d2fe-account-create-update-9n2k8"] Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.441719 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.441800 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.450132 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.501115 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.541935 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53179c75-37bd-4459-bfc3-1afce65aee64-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.527243 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-gnbwb"] Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.572226 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d2fe-account-create-update-9n2k8"] Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.642277 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-7f6d-account-create-update-gtx98"] Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.644091 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.647933 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-727kc\" (UniqueName: \"kubernetes.io/projected/e155f755-b0bf-4344-a18d-4b54b783c589-kube-api-access-727kc\") pod \"nova-cell1-db-create-gnbwb\" (UID: \"e155f755-b0bf-4344-a18d-4b54b783c589\") " pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.648007 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7429614-81d2-4c30-a261-321dd8d020dc-operator-scripts\") pod \"nova-cell0-d2fe-account-create-update-9n2k8\" (UID: \"f7429614-81d2-4c30-a261-321dd8d020dc\") " pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.648074 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml49x\" (UniqueName: \"kubernetes.io/projected/f7429614-81d2-4c30-a261-321dd8d020dc-kube-api-access-ml49x\") pod \"nova-cell0-d2fe-account-create-update-9n2k8\" (UID: \"f7429614-81d2-4c30-a261-321dd8d020dc\") " pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.648105 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e155f755-b0bf-4344-a18d-4b54b783c589-operator-scripts\") pod \"nova-cell1-db-create-gnbwb\" (UID: \"e155f755-b0bf-4344-a18d-4b54b783c589\") " pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.652019 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.658543 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-7f6d-account-create-update-gtx98"] Dec 01 
18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.688210 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.752049 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml49x\" (UniqueName: \"kubernetes.io/projected/f7429614-81d2-4c30-a261-321dd8d020dc-kube-api-access-ml49x\") pod \"nova-cell0-d2fe-account-create-update-9n2k8\" (UID: \"f7429614-81d2-4c30-a261-321dd8d020dc\") " pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.752582 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e155f755-b0bf-4344-a18d-4b54b783c589-operator-scripts\") pod \"nova-cell1-db-create-gnbwb\" (UID: \"e155f755-b0bf-4344-a18d-4b54b783c589\") " pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.752873 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-727kc\" (UniqueName: \"kubernetes.io/projected/e155f755-b0bf-4344-a18d-4b54b783c589-kube-api-access-727kc\") pod \"nova-cell1-db-create-gnbwb\" (UID: \"e155f755-b0bf-4344-a18d-4b54b783c589\") " pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.752989 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xp9g9\" (UniqueName: \"kubernetes.io/projected/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-kube-api-access-xp9g9\") pod \"nova-cell1-7f6d-account-create-update-gtx98\" (UID: \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\") " pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.753131 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7429614-81d2-4c30-a261-321dd8d020dc-operator-scripts\") pod \"nova-cell0-d2fe-account-create-update-9n2k8\" (UID: \"f7429614-81d2-4c30-a261-321dd8d020dc\") " pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.753251 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-operator-scripts\") pod \"nova-cell1-7f6d-account-create-update-gtx98\" (UID: \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\") " pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.754351 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7429614-81d2-4c30-a261-321dd8d020dc-operator-scripts\") pod \"nova-cell0-d2fe-account-create-update-9n2k8\" (UID: \"f7429614-81d2-4c30-a261-321dd8d020dc\") " pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.754366 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e155f755-b0bf-4344-a18d-4b54b783c589-operator-scripts\") pod \"nova-cell1-db-create-gnbwb\" (UID: \"e155f755-b0bf-4344-a18d-4b54b783c589\") " pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:37 crc kubenswrapper[4935]: 
I1201 18:55:37.770203 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-727kc\" (UniqueName: \"kubernetes.io/projected/e155f755-b0bf-4344-a18d-4b54b783c589-kube-api-access-727kc\") pod \"nova-cell1-db-create-gnbwb\" (UID: \"e155f755-b0bf-4344-a18d-4b54b783c589\") " pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.771594 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml49x\" (UniqueName: \"kubernetes.io/projected/f7429614-81d2-4c30-a261-321dd8d020dc-kube-api-access-ml49x\") pod \"nova-cell0-d2fe-account-create-update-9n2k8\" (UID: \"f7429614-81d2-4c30-a261-321dd8d020dc\") " pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.822875 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.856166 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-operator-scripts\") pod \"nova-cell1-7f6d-account-create-update-gtx98\" (UID: \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\") " pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.856454 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xp9g9\" (UniqueName: \"kubernetes.io/projected/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-kube-api-access-xp9g9\") pod \"nova-cell1-7f6d-account-create-update-gtx98\" (UID: \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\") " pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.857470 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-operator-scripts\") pod \"nova-cell1-7f6d-account-create-update-gtx98\" (UID: \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\") " pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.876334 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xp9g9\" (UniqueName: \"kubernetes.io/projected/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-kube-api-access-xp9g9\") pod \"nova-cell1-7f6d-account-create-update-gtx98\" (UID: \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\") " pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.882876 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.882922 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.935746 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.949524 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:37 crc kubenswrapper[4935]: I1201 18:55:37.979768 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.004428 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5chlp"] Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.061009 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.075306 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53179c75-37bd-4459-bfc3-1afce65aee64","Type":"ContainerDied","Data":"15ae3289c29a2f7e1ff7225288bede3aee931170ce99cf9017c3ebc1dd878f53"} Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.075362 4935 scope.go:117] "RemoveContainer" containerID="3470612512fefd9d8596e2946ccaeb0e6efcf837b325fcb2d1673dd049863fbe" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.075527 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.076353 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.076382 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.207616 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5142-account-create-update-nhlr5"] Dec 01 18:55:38 crc kubenswrapper[4935]: W1201 18:55:38.259668 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0b1e7a8_88dd_4890_8971_eec026c2d209.slice/crio-df39daa95fd9a0e4a7508d1ebd57a1b56c475f7601725db71ead316b4b727e53 WatchSource:0}: Error finding container df39daa95fd9a0e4a7508d1ebd57a1b56c475f7601725db71ead316b4b727e53: Status 404 returned error can't find the container with id df39daa95fd9a0e4a7508d1ebd57a1b56c475f7601725db71ead316b4b727e53 Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.321318 4935 scope.go:117] "RemoveContainer" containerID="428009dfcaa8251b3ed4f345e7562836efe53d977ea58004e312fd1f5686312b" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.341798 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.367587 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.376957 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.380474 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.383859 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.383865 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.392675 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.409215 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-qcl98"] Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.437945 4935 scope.go:117] "RemoveContainer" containerID="7f451ca0d433d4929ea9339fdf33cd8eee9852ae3c51254352d35235bc5d48be" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.486329 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-scripts\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.486411 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-run-httpd\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.486498 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-config-data\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.486543 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.486668 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.486841 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-log-httpd\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.487042 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr448\" (UniqueName: \"kubernetes.io/projected/c773f2fd-7296-4d82-8a48-ed7acefcaac0-kube-api-access-pr448\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.511984 4935 
scope.go:117] "RemoveContainer" containerID="9f2acc5d1f0ca6836312288c97e25001e65b4e4ab92d22ff3019a8fde01203e8" Dec 01 18:55:38 crc kubenswrapper[4935]: W1201 18:55:38.526977 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7429614_81d2_4c30_a261_321dd8d020dc.slice/crio-ba4593a2c9e46a5e61ea7bb2f4e044e0a40130ecd0d2f34182e3844a7d363376 WatchSource:0}: Error finding container ba4593a2c9e46a5e61ea7bb2f4e044e0a40130ecd0d2f34182e3844a7d363376: Status 404 returned error can't find the container with id ba4593a2c9e46a5e61ea7bb2f4e044e0a40130ecd0d2f34182e3844a7d363376 Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.540265 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53179c75-37bd-4459-bfc3-1afce65aee64" path="/var/lib/kubelet/pods/53179c75-37bd-4459-bfc3-1afce65aee64/volumes" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.541269 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d2fe-account-create-update-9n2k8"] Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.590669 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-config-data\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.590722 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.590784 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.590865 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-log-httpd\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.590963 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr448\" (UniqueName: \"kubernetes.io/projected/c773f2fd-7296-4d82-8a48-ed7acefcaac0-kube-api-access-pr448\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.591061 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-scripts\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.591101 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-run-httpd\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " 
pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.591532 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-run-httpd\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.598010 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-log-httpd\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.602173 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.602435 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.612284 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-config-data\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.623951 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-scripts\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.628785 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr448\" (UniqueName: \"kubernetes.io/projected/c773f2fd-7296-4d82-8a48-ed7acefcaac0-kube-api-access-pr448\") pod \"ceilometer-0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.637392 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.712922 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-7f6d-account-create-update-gtx98"] Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.723998 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:38 crc kubenswrapper[4935]: I1201 18:55:38.881895 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-gnbwb"] Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.145413 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5142-account-create-update-nhlr5" event={"ID":"e0b1e7a8-88dd-4890-8971-eec026c2d209","Type":"ContainerStarted","Data":"e2fc52c6df4b1b402806969a4bb97c7f47b4497942debe23465a2d57cb4851b8"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.145713 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5142-account-create-update-nhlr5" event={"ID":"e0b1e7a8-88dd-4890-8971-eec026c2d209","Type":"ContainerStarted","Data":"df39daa95fd9a0e4a7508d1ebd57a1b56c475f7601725db71ead316b4b727e53"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.193070 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" event={"ID":"240c55dd-0ae3-4867-9a2b-1608dad2c7c3","Type":"ContainerStarted","Data":"2008f00624823d1218e8c89a73f200d28cc6bb9ddfdb6187657c3eedc37fe849"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.198999 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qcl98" event={"ID":"a8ca764e-f73d-424c-ac48-60af2d5729d2","Type":"ContainerStarted","Data":"57e00f233832965cad5cf29aef91055ecec6ba0271a1183acf5dada0e6b57f4d"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.199055 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qcl98" event={"ID":"a8ca764e-f73d-424c-ac48-60af2d5729d2","Type":"ContainerStarted","Data":"2f13e2e1a73fb7d7b9b4a5473f28689435f04ae2d5657ef4e177c2df40d4bba6"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.200331 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gnbwb" event={"ID":"e155f755-b0bf-4344-a18d-4b54b783c589","Type":"ContainerStarted","Data":"0a57f11d4a9e5e7554156774b68f6d1b06312233fd179941220f4dab270ced82"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.216998 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5chlp" event={"ID":"65f74b49-9135-49c6-af13-5107b45a5dd3","Type":"ContainerStarted","Data":"0ae9dfbd52e4ab66ee55d0f32d02ecec7530ed96c084ebf800933c5c64fe7fdc"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.217054 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5chlp" event={"ID":"65f74b49-9135-49c6-af13-5107b45a5dd3","Type":"ContainerStarted","Data":"e1c53b2858c2bd1304c4a0e9017ee19c064bc0c298a29be44d834cf5ec6ff794"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.229513 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" event={"ID":"f7429614-81d2-4c30-a261-321dd8d020dc","Type":"ContainerStarted","Data":"28e492e821f79da37953b76a30fe28f93a2c7d6a388d5d606318348c8cc2985b"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.229557 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" event={"ID":"f7429614-81d2-4c30-a261-321dd8d020dc","Type":"ContainerStarted","Data":"ba4593a2c9e46a5e61ea7bb2f4e044e0a40130ecd0d2f34182e3844a7d363376"} Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.263892 4935 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-api-5142-account-create-update-nhlr5" podStartSLOduration=2.263862436 podStartE2EDuration="2.263862436s" podCreationTimestamp="2025-12-01 18:55:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:39.185522877 +0000 UTC m=+1553.207152146" watchObservedRunningTime="2025-12-01 18:55:39.263862436 +0000 UTC m=+1553.285491695" Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.284395 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-qcl98" podStartSLOduration=2.284372132 podStartE2EDuration="2.284372132s" podCreationTimestamp="2025-12-01 18:55:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:39.222546645 +0000 UTC m=+1553.244175904" watchObservedRunningTime="2025-12-01 18:55:39.284372132 +0000 UTC m=+1553.306001391" Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.403357 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" podStartSLOduration=2.403336159 podStartE2EDuration="2.403336159s" podCreationTimestamp="2025-12-01 18:55:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:39.338702955 +0000 UTC m=+1553.360332214" watchObservedRunningTime="2025-12-01 18:55:39.403336159 +0000 UTC m=+1553.424965418" Dec 01 18:55:39 crc kubenswrapper[4935]: I1201 18:55:39.673621 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:40 crc kubenswrapper[4935]: I1201 18:55:40.260426 4935 generic.go:334] "Generic (PLEG): container finished" podID="65f74b49-9135-49c6-af13-5107b45a5dd3" containerID="0ae9dfbd52e4ab66ee55d0f32d02ecec7530ed96c084ebf800933c5c64fe7fdc" exitCode=0 Dec 01 18:55:40 crc kubenswrapper[4935]: I1201 18:55:40.260486 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5chlp" event={"ID":"65f74b49-9135-49c6-af13-5107b45a5dd3","Type":"ContainerDied","Data":"0ae9dfbd52e4ab66ee55d0f32d02ecec7530ed96c084ebf800933c5c64fe7fdc"} Dec 01 18:55:40 crc kubenswrapper[4935]: I1201 18:55:40.272437 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerStarted","Data":"3f8d3f9bedddc8acfe9eb8d773b6a3255b63dda484b3ee8f4f1ab8554957feee"} Dec 01 18:55:40 crc kubenswrapper[4935]: I1201 18:55:40.877613 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:40 crc kubenswrapper[4935]: I1201 18:55:40.991703 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4p24\" (UniqueName: \"kubernetes.io/projected/65f74b49-9135-49c6-af13-5107b45a5dd3-kube-api-access-p4p24\") pod \"65f74b49-9135-49c6-af13-5107b45a5dd3\" (UID: \"65f74b49-9135-49c6-af13-5107b45a5dd3\") " Dec 01 18:55:40 crc kubenswrapper[4935]: I1201 18:55:40.991913 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f74b49-9135-49c6-af13-5107b45a5dd3-operator-scripts\") pod \"65f74b49-9135-49c6-af13-5107b45a5dd3\" (UID: \"65f74b49-9135-49c6-af13-5107b45a5dd3\") " Dec 01 18:55:40 crc kubenswrapper[4935]: I1201 18:55:40.992591 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65f74b49-9135-49c6-af13-5107b45a5dd3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "65f74b49-9135-49c6-af13-5107b45a5dd3" (UID: "65f74b49-9135-49c6-af13-5107b45a5dd3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:40 crc kubenswrapper[4935]: I1201 18:55:40.998530 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65f74b49-9135-49c6-af13-5107b45a5dd3-kube-api-access-p4p24" (OuterVolumeSpecName: "kube-api-access-p4p24") pod "65f74b49-9135-49c6-af13-5107b45a5dd3" (UID: "65f74b49-9135-49c6-af13-5107b45a5dd3"). InnerVolumeSpecName "kube-api-access-p4p24". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.094757 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65f74b49-9135-49c6-af13-5107b45a5dd3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.095035 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4p24\" (UniqueName: \"kubernetes.io/projected/65f74b49-9135-49c6-af13-5107b45a5dd3-kube-api-access-p4p24\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.282470 4935 generic.go:334] "Generic (PLEG): container finished" podID="f7429614-81d2-4c30-a261-321dd8d020dc" containerID="28e492e821f79da37953b76a30fe28f93a2c7d6a388d5d606318348c8cc2985b" exitCode=0 Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.282538 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" event={"ID":"f7429614-81d2-4c30-a261-321dd8d020dc","Type":"ContainerDied","Data":"28e492e821f79da37953b76a30fe28f93a2c7d6a388d5d606318348c8cc2985b"} Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.284606 4935 generic.go:334] "Generic (PLEG): container finished" podID="e0b1e7a8-88dd-4890-8971-eec026c2d209" containerID="e2fc52c6df4b1b402806969a4bb97c7f47b4497942debe23465a2d57cb4851b8" exitCode=0 Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.284652 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5142-account-create-update-nhlr5" event={"ID":"e0b1e7a8-88dd-4890-8971-eec026c2d209","Type":"ContainerDied","Data":"e2fc52c6df4b1b402806969a4bb97c7f47b4497942debe23465a2d57cb4851b8"} Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.286305 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" event={"ID":"240c55dd-0ae3-4867-9a2b-1608dad2c7c3","Type":"ContainerStarted","Data":"fd8359a5f4ff01b7307717f6d351ecd0b8e40c46e58d3d8f08ce137d5150f135"} Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.288429 4935 generic.go:334] "Generic (PLEG): container finished" podID="a8ca764e-f73d-424c-ac48-60af2d5729d2" containerID="57e00f233832965cad5cf29aef91055ecec6ba0271a1183acf5dada0e6b57f4d" exitCode=0 Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.288468 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qcl98" event={"ID":"a8ca764e-f73d-424c-ac48-60af2d5729d2","Type":"ContainerDied","Data":"57e00f233832965cad5cf29aef91055ecec6ba0271a1183acf5dada0e6b57f4d"} Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.290230 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gnbwb" event={"ID":"e155f755-b0bf-4344-a18d-4b54b783c589","Type":"ContainerStarted","Data":"c4c36780c4c88b1e623dd759bd59ee9330fa5edc20a76956b817b57ea390c9ec"} Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.293362 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5chlp" Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.293418 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5chlp" event={"ID":"65f74b49-9135-49c6-af13-5107b45a5dd3","Type":"ContainerDied","Data":"e1c53b2858c2bd1304c4a0e9017ee19c064bc0c298a29be44d834cf5ec6ff794"} Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.293446 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1c53b2858c2bd1304c4a0e9017ee19c064bc0c298a29be44d834cf5ec6ff794" Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.341370 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" podStartSLOduration=4.341345566 podStartE2EDuration="4.341345566s" podCreationTimestamp="2025-12-01 18:55:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:41.33002343 +0000 UTC m=+1555.351652689" watchObservedRunningTime="2025-12-01 18:55:41.341345566 +0000 UTC m=+1555.362974825" Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.347679 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-gnbwb" podStartSLOduration=4.347661836 podStartE2EDuration="4.347661836s" podCreationTimestamp="2025-12-01 18:55:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:55:41.345480057 +0000 UTC m=+1555.367109316" watchObservedRunningTime="2025-12-01 18:55:41.347661836 +0000 UTC m=+1555.369291095" Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.963813 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:55:41 crc kubenswrapper[4935]: I1201 18:55:41.966158 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.132346 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-6674fffddb-zqfh2"] Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.197863 4935 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.220700 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.323657 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-764bd4bf4b-gnb6w"] Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.351759 4935 generic.go:334] "Generic (PLEG): container finished" podID="e155f755-b0bf-4344-a18d-4b54b783c589" containerID="c4c36780c4c88b1e623dd759bd59ee9330fa5edc20a76956b817b57ea390c9ec" exitCode=0 Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.351847 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gnbwb" event={"ID":"e155f755-b0bf-4344-a18d-4b54b783c589","Type":"ContainerDied","Data":"c4c36780c4c88b1e623dd759bd59ee9330fa5edc20a76956b817b57ea390c9ec"} Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.366846 4935 generic.go:334] "Generic (PLEG): container finished" podID="240c55dd-0ae3-4867-9a2b-1608dad2c7c3" containerID="fd8359a5f4ff01b7307717f6d351ecd0b8e40c46e58d3d8f08ce137d5150f135" exitCode=0 Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.367165 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" event={"ID":"240c55dd-0ae3-4867-9a2b-1608dad2c7c3","Type":"ContainerDied","Data":"fd8359a5f4ff01b7307717f6d351ecd0b8e40c46e58d3d8f08ce137d5150f135"} Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.415241 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerStarted","Data":"4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18"} Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.874378 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.874638 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.949439 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 18:55:42 crc kubenswrapper[4935]: I1201 18:55:42.968054 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.192394 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.235193 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.311309 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data-custom\") pod \"2b1755ee-18df-46ef-be6d-b81b7967d831\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.311423 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7n2t\" (UniqueName: \"kubernetes.io/projected/2b1755ee-18df-46ef-be6d-b81b7967d831-kube-api-access-n7n2t\") pod \"2b1755ee-18df-46ef-be6d-b81b7967d831\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.311603 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-combined-ca-bundle\") pod \"2b1755ee-18df-46ef-be6d-b81b7967d831\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.311636 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data\") pod \"2b1755ee-18df-46ef-be6d-b81b7967d831\" (UID: \"2b1755ee-18df-46ef-be6d-b81b7967d831\") " Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.316625 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b1755ee-18df-46ef-be6d-b81b7967d831-kube-api-access-n7n2t" (OuterVolumeSpecName: "kube-api-access-n7n2t") pod "2b1755ee-18df-46ef-be6d-b81b7967d831" (UID: "2b1755ee-18df-46ef-be6d-b81b7967d831"). InnerVolumeSpecName "kube-api-access-n7n2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.320715 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2b1755ee-18df-46ef-be6d-b81b7967d831" (UID: "2b1755ee-18df-46ef-be6d-b81b7967d831"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.394571 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b1755ee-18df-46ef-be6d-b81b7967d831" (UID: "2b1755ee-18df-46ef-be6d-b81b7967d831"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.413681 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data-custom\") pod \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.414375 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data\") pod \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.414613 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-combined-ca-bundle\") pod \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.415629 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9jlq\" (UniqueName: \"kubernetes.io/projected/b7f13e0e-b593-44c8-b015-3bb2d32b896a-kube-api-access-t9jlq\") pod \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.416418 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.416500 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.416561 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7n2t\" (UniqueName: \"kubernetes.io/projected/2b1755ee-18df-46ef-be6d-b81b7967d831-kube-api-access-n7n2t\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.422544 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b7f13e0e-b593-44c8-b015-3bb2d32b896a" (UID: "b7f13e0e-b593-44c8-b015-3bb2d32b896a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.422845 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7f13e0e-b593-44c8-b015-3bb2d32b896a-kube-api-access-t9jlq" (OuterVolumeSpecName: "kube-api-access-t9jlq") pod "b7f13e0e-b593-44c8-b015-3bb2d32b896a" (UID: "b7f13e0e-b593-44c8-b015-3bb2d32b896a"). InnerVolumeSpecName "kube-api-access-t9jlq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.464089 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" event={"ID":"b7f13e0e-b593-44c8-b015-3bb2d32b896a","Type":"ContainerDied","Data":"dc66717073546adec236e4b21f0e90fceda1e9cde856b5a26c7a321cf27fd215"} Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.464306 4935 scope.go:117] "RemoveContainer" containerID="4fbe75b7036e8c9316858dda35aba29a6b0bd39097c9c79033a66bea3fc1bca8" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.464413 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-764bd4bf4b-gnb6w" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.475112 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data" (OuterVolumeSpecName: "config-data") pod "2b1755ee-18df-46ef-be6d-b81b7967d831" (UID: "2b1755ee-18df-46ef-be6d-b81b7967d831"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.482393 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerStarted","Data":"3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9"} Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.490632 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7f13e0e-b593-44c8-b015-3bb2d32b896a" (UID: "b7f13e0e-b593-44c8-b015-3bb2d32b896a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.511396 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6674fffddb-zqfh2" event={"ID":"2b1755ee-18df-46ef-be6d-b81b7967d831","Type":"ContainerDied","Data":"082103566ad9323be7fec58e2865592286af1490c34f333110d47aae39dd45b7"} Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.520588 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-6674fffddb-zqfh2" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.528475 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.554324 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b1755ee-18df-46ef-be6d-b81b7967d831-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.531104 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.554355 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.554443 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9jlq\" (UniqueName: \"kubernetes.io/projected/b7f13e0e-b593-44c8-b015-3bb2d32b896a-kube-api-access-t9jlq\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.555529 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.609509 4935 scope.go:117] "RemoveContainer" containerID="c1b6dd7dbb2212d154227588ddb181af90caecc9fb9ec5ac8c3912353a2e68d9" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.664735 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-6674fffddb-zqfh2"] Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.716513 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-6674fffddb-zqfh2"] Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.763391 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data" (OuterVolumeSpecName: "config-data") pod "b7f13e0e-b593-44c8-b015-3bb2d32b896a" (UID: "b7f13e0e-b593-44c8-b015-3bb2d32b896a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.763485 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data\") pod \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\" (UID: \"b7f13e0e-b593-44c8-b015-3bb2d32b896a\") " Dec 01 18:55:43 crc kubenswrapper[4935]: W1201 18:55:43.764833 4935 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/b7f13e0e-b593-44c8-b015-3bb2d32b896a/volumes/kubernetes.io~secret/config-data Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.764853 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data" (OuterVolumeSpecName: "config-data") pod "b7f13e0e-b593-44c8-b015-3bb2d32b896a" (UID: "b7f13e0e-b593-44c8-b015-3bb2d32b896a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:43 crc kubenswrapper[4935]: I1201 18:55:43.769223 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f13e0e-b593-44c8-b015-3bb2d32b896a-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.019265 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.019364 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.028857 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.043614 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-764bd4bf4b-gnb6w"] Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.068091 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.105897 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-764bd4bf4b-gnb6w"] Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.124526 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.185373 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qfzf\" (UniqueName: \"kubernetes.io/projected/e0b1e7a8-88dd-4890-8971-eec026c2d209-kube-api-access-4qfzf\") pod \"e0b1e7a8-88dd-4890-8971-eec026c2d209\" (UID: \"e0b1e7a8-88dd-4890-8971-eec026c2d209\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.185620 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml49x\" (UniqueName: \"kubernetes.io/projected/f7429614-81d2-4c30-a261-321dd8d020dc-kube-api-access-ml49x\") pod \"f7429614-81d2-4c30-a261-321dd8d020dc\" (UID: \"f7429614-81d2-4c30-a261-321dd8d020dc\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.185661 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkgvz\" (UniqueName: \"kubernetes.io/projected/a8ca764e-f73d-424c-ac48-60af2d5729d2-kube-api-access-lkgvz\") pod \"a8ca764e-f73d-424c-ac48-60af2d5729d2\" (UID: \"a8ca764e-f73d-424c-ac48-60af2d5729d2\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.185737 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b1e7a8-88dd-4890-8971-eec026c2d209-operator-scripts\") pod \"e0b1e7a8-88dd-4890-8971-eec026c2d209\" (UID: \"e0b1e7a8-88dd-4890-8971-eec026c2d209\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.185807 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8ca764e-f73d-424c-ac48-60af2d5729d2-operator-scripts\") pod \"a8ca764e-f73d-424c-ac48-60af2d5729d2\" (UID: \"a8ca764e-f73d-424c-ac48-60af2d5729d2\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.185837 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/f7429614-81d2-4c30-a261-321dd8d020dc-operator-scripts\") pod \"f7429614-81d2-4c30-a261-321dd8d020dc\" (UID: \"f7429614-81d2-4c30-a261-321dd8d020dc\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.187683 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0b1e7a8-88dd-4890-8971-eec026c2d209-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e0b1e7a8-88dd-4890-8971-eec026c2d209" (UID: "e0b1e7a8-88dd-4890-8971-eec026c2d209"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.188101 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8ca764e-f73d-424c-ac48-60af2d5729d2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a8ca764e-f73d-424c-ac48-60af2d5729d2" (UID: "a8ca764e-f73d-424c-ac48-60af2d5729d2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.188726 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7429614-81d2-4c30-a261-321dd8d020dc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f7429614-81d2-4c30-a261-321dd8d020dc" (UID: "f7429614-81d2-4c30-a261-321dd8d020dc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.216032 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7429614-81d2-4c30-a261-321dd8d020dc-kube-api-access-ml49x" (OuterVolumeSpecName: "kube-api-access-ml49x") pod "f7429614-81d2-4c30-a261-321dd8d020dc" (UID: "f7429614-81d2-4c30-a261-321dd8d020dc"). InnerVolumeSpecName "kube-api-access-ml49x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.218806 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0b1e7a8-88dd-4890-8971-eec026c2d209-kube-api-access-4qfzf" (OuterVolumeSpecName: "kube-api-access-4qfzf") pod "e0b1e7a8-88dd-4890-8971-eec026c2d209" (UID: "e0b1e7a8-88dd-4890-8971-eec026c2d209"). InnerVolumeSpecName "kube-api-access-4qfzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.219457 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8ca764e-f73d-424c-ac48-60af2d5729d2-kube-api-access-lkgvz" (OuterVolumeSpecName: "kube-api-access-lkgvz") pod "a8ca764e-f73d-424c-ac48-60af2d5729d2" (UID: "a8ca764e-f73d-424c-ac48-60af2d5729d2"). InnerVolumeSpecName "kube-api-access-lkgvz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.288105 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml49x\" (UniqueName: \"kubernetes.io/projected/f7429614-81d2-4c30-a261-321dd8d020dc-kube-api-access-ml49x\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.288137 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkgvz\" (UniqueName: \"kubernetes.io/projected/a8ca764e-f73d-424c-ac48-60af2d5729d2-kube-api-access-lkgvz\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.330689 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b1e7a8-88dd-4890-8971-eec026c2d209-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.330754 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8ca764e-f73d-424c-ac48-60af2d5729d2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.330766 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7429614-81d2-4c30-a261-321dd8d020dc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.330778 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qfzf\" (UniqueName: \"kubernetes.io/projected/e0b1e7a8-88dd-4890-8971-eec026c2d209-kube-api-access-4qfzf\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.431494 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.436985 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.439464 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-operator-scripts\") pod \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\" (UID: \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.439760 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xp9g9\" (UniqueName: \"kubernetes.io/projected/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-kube-api-access-xp9g9\") pod \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\" (UID: \"240c55dd-0ae3-4867-9a2b-1608dad2c7c3\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.440721 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "240c55dd-0ae3-4867-9a2b-1608dad2c7c3" (UID: "240c55dd-0ae3-4867-9a2b-1608dad2c7c3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.448491 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-kube-api-access-xp9g9" (OuterVolumeSpecName: "kube-api-access-xp9g9") pod "240c55dd-0ae3-4867-9a2b-1608dad2c7c3" (UID: "240c55dd-0ae3-4867-9a2b-1608dad2c7c3"). InnerVolumeSpecName "kube-api-access-xp9g9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.462509 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.582160 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xp9g9\" (UniqueName: \"kubernetes.io/projected/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-kube-api-access-xp9g9\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.582408 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/240c55dd-0ae3-4867-9a2b-1608dad2c7c3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.602091 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gnbwb" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.605785 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" path="/var/lib/kubelet/pods/2b1755ee-18df-46ef-be6d-b81b7967d831/volumes" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.606755 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" path="/var/lib/kubelet/pods/b7f13e0e-b593-44c8-b015-3bb2d32b896a/volumes" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.623785 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.628983 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerStarted","Data":"eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807"} Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.629027 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gnbwb" event={"ID":"e155f755-b0bf-4344-a18d-4b54b783c589","Type":"ContainerDied","Data":"0a57f11d4a9e5e7554156774b68f6d1b06312233fd179941220f4dab270ced82"} Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.629043 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a57f11d4a9e5e7554156774b68f6d1b06312233fd179941220f4dab270ced82" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.629057 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d2fe-account-create-update-9n2k8" event={"ID":"f7429614-81d2-4c30-a261-321dd8d020dc","Type":"ContainerDied","Data":"ba4593a2c9e46a5e61ea7bb2f4e044e0a40130ecd0d2f34182e3844a7d363376"} Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.629068 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba4593a2c9e46a5e61ea7bb2f4e044e0a40130ecd0d2f34182e3844a7d363376" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.682794 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e155f755-b0bf-4344-a18d-4b54b783c589-operator-scripts\") pod \"e155f755-b0bf-4344-a18d-4b54b783c589\" (UID: \"e155f755-b0bf-4344-a18d-4b54b783c589\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.682890 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-727kc\" (UniqueName: \"kubernetes.io/projected/e155f755-b0bf-4344-a18d-4b54b783c589-kube-api-access-727kc\") pod \"e155f755-b0bf-4344-a18d-4b54b783c589\" (UID: \"e155f755-b0bf-4344-a18d-4b54b783c589\") " Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.683445 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e155f755-b0bf-4344-a18d-4b54b783c589-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e155f755-b0bf-4344-a18d-4b54b783c589" (UID: "e155f755-b0bf-4344-a18d-4b54b783c589"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.685752 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5142-account-create-update-nhlr5" event={"ID":"e0b1e7a8-88dd-4890-8971-eec026c2d209","Type":"ContainerDied","Data":"df39daa95fd9a0e4a7508d1ebd57a1b56c475f7601725db71ead316b4b727e53"} Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.685788 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df39daa95fd9a0e4a7508d1ebd57a1b56c475f7601725db71ead316b4b727e53" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.685876 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5142-account-create-update-nhlr5" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.692793 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" event={"ID":"240c55dd-0ae3-4867-9a2b-1608dad2c7c3","Type":"ContainerDied","Data":"2008f00624823d1218e8c89a73f200d28cc6bb9ddfdb6187657c3eedc37fe849"} Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.692982 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2008f00624823d1218e8c89a73f200d28cc6bb9ddfdb6187657c3eedc37fe849" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.693174 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-7f6d-account-create-update-gtx98" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.702365 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e155f755-b0bf-4344-a18d-4b54b783c589-kube-api-access-727kc" (OuterVolumeSpecName: "kube-api-access-727kc") pod "e155f755-b0bf-4344-a18d-4b54b783c589" (UID: "e155f755-b0bf-4344-a18d-4b54b783c589"). InnerVolumeSpecName "kube-api-access-727kc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.713262 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qcl98" event={"ID":"a8ca764e-f73d-424c-ac48-60af2d5729d2","Type":"ContainerDied","Data":"2f13e2e1a73fb7d7b9b4a5473f28689435f04ae2d5657ef4e177c2df40d4bba6"} Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.713320 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f13e2e1a73fb7d7b9b4a5473f28689435f04ae2d5657ef4e177c2df40d4bba6" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.713322 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-qcl98" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.790979 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e155f755-b0bf-4344-a18d-4b54b783c589-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:44 crc kubenswrapper[4935]: I1201 18:55:44.791008 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-727kc\" (UniqueName: \"kubernetes.io/projected/e155f755-b0bf-4344-a18d-4b54b783c589-kube-api-access-727kc\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:45 crc kubenswrapper[4935]: I1201 18:55:45.754725 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:55:45 crc kubenswrapper[4935]: I1201 18:55:45.754900 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:55:46 crc kubenswrapper[4935]: I1201 18:55:46.458277 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 18:55:46 crc kubenswrapper[4935]: I1201 18:55:46.539716 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-7b798c567c-9slfq"] Dec 01 18:55:46 crc kubenswrapper[4935]: I1201 18:55:46.539906 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-engine-7b798c567c-9slfq" podUID="67884ebb-46bf-417e-b499-776f74720d64" containerName="heat-engine" containerID="cri-o://0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8" gracePeriod=60 Dec 01 18:55:46 crc kubenswrapper[4935]: I1201 18:55:46.768928 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerStarted","Data":"5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a"} Dec 01 18:55:46 crc kubenswrapper[4935]: I1201 18:55:46.769269 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:55:46 crc kubenswrapper[4935]: I1201 18:55:46.793475 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.870840561 podStartE2EDuration="8.793452435s" podCreationTimestamp="2025-12-01 18:55:38 +0000 UTC" firstStartedPulling="2025-12-01 18:55:39.614092202 +0000 UTC m=+1553.635721451" lastFinishedPulling="2025-12-01 18:55:45.536704066 +0000 UTC m=+1559.558333325" observedRunningTime="2025-12-01 18:55:46.788583711 +0000 UTC m=+1560.810212970" watchObservedRunningTime="2025-12-01 18:55:46.793452435 +0000 UTC m=+1560.815081694" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.607421 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.732758 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr876"] Dec 01 18:55:47 crc kubenswrapper[4935]: E1201 18:55:47.733241 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" containerName="heat-api" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733257 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" containerName="heat-api" Dec 01 18:55:47 crc kubenswrapper[4935]: E1201 18:55:47.733285 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="240c55dd-0ae3-4867-9a2b-1608dad2c7c3" 
containerName="mariadb-account-create-update" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733292 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="240c55dd-0ae3-4867-9a2b-1608dad2c7c3" containerName="mariadb-account-create-update" Dec 01 18:55:47 crc kubenswrapper[4935]: E1201 18:55:47.733304 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e155f755-b0bf-4344-a18d-4b54b783c589" containerName="mariadb-database-create" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733311 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e155f755-b0bf-4344-a18d-4b54b783c589" containerName="mariadb-database-create" Dec 01 18:55:47 crc kubenswrapper[4935]: E1201 18:55:47.733328 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" containerName="heat-cfnapi" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733333 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" containerName="heat-cfnapi" Dec 01 18:55:47 crc kubenswrapper[4935]: E1201 18:55:47.733340 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" containerName="heat-cfnapi" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733346 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" containerName="heat-cfnapi" Dec 01 18:55:47 crc kubenswrapper[4935]: E1201 18:55:47.733361 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8ca764e-f73d-424c-ac48-60af2d5729d2" containerName="mariadb-database-create" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733367 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8ca764e-f73d-424c-ac48-60af2d5729d2" containerName="mariadb-database-create" Dec 01 18:55:47 crc kubenswrapper[4935]: E1201 18:55:47.733380 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7429614-81d2-4c30-a261-321dd8d020dc" containerName="mariadb-account-create-update" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733386 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7429614-81d2-4c30-a261-321dd8d020dc" containerName="mariadb-account-create-update" Dec 01 18:55:47 crc kubenswrapper[4935]: E1201 18:55:47.733406 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f74b49-9135-49c6-af13-5107b45a5dd3" containerName="mariadb-database-create" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733412 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f74b49-9135-49c6-af13-5107b45a5dd3" containerName="mariadb-database-create" Dec 01 18:55:47 crc kubenswrapper[4935]: E1201 18:55:47.733420 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b1e7a8-88dd-4890-8971-eec026c2d209" containerName="mariadb-account-create-update" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733426 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b1e7a8-88dd-4890-8971-eec026c2d209" containerName="mariadb-account-create-update" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733647 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7429614-81d2-4c30-a261-321dd8d020dc" containerName="mariadb-account-create-update" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733663 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" containerName="heat-cfnapi" Dec 01 18:55:47 crc 
kubenswrapper[4935]: I1201 18:55:47.733673 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f74b49-9135-49c6-af13-5107b45a5dd3" containerName="mariadb-database-create" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733686 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8ca764e-f73d-424c-ac48-60af2d5729d2" containerName="mariadb-database-create" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733695 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="240c55dd-0ae3-4867-9a2b-1608dad2c7c3" containerName="mariadb-account-create-update" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733703 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e155f755-b0bf-4344-a18d-4b54b783c589" containerName="mariadb-database-create" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733716 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" containerName="heat-api" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.733730 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b1e7a8-88dd-4890-8971-eec026c2d209" containerName="mariadb-account-create-update" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.734511 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.741204 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.741449 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.746112 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-bmk65" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.746914 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr876"] Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.829872 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.830231 4935 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.835048 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.911401 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5g66\" (UniqueName: \"kubernetes.io/projected/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-kube-api-access-w5g66\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.912266 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.912698 
4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-scripts\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:47 crc kubenswrapper[4935]: I1201 18:55:47.912852 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-config-data\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.015648 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-scripts\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.015724 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-config-data\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.015790 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5g66\" (UniqueName: \"kubernetes.io/projected/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-kube-api-access-w5g66\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.015845 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.022313 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-config-data\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.025453 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.031547 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-scripts\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 
18:55:48.055808 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5g66\" (UniqueName: \"kubernetes.io/projected/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-kube-api-access-w5g66\") pod \"nova-cell0-conductor-db-sync-nr876\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.352640 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:55:48 crc kubenswrapper[4935]: E1201 18:55:48.544444 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:55:48 crc kubenswrapper[4935]: E1201 18:55:48.581079 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:55:48 crc kubenswrapper[4935]: E1201 18:55:48.628350 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:55:48 crc kubenswrapper[4935]: E1201 18:55:48.628426 4935 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-7b798c567c-9slfq" podUID="67884ebb-46bf-417e-b499-776f74720d64" containerName="heat-engine" Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.801452 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="ceilometer-central-agent" containerID="cri-o://4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18" gracePeriod=30 Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.802114 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="proxy-httpd" containerID="cri-o://5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a" gracePeriod=30 Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.802185 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="sg-core" containerID="cri-o://eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807" gracePeriod=30 Dec 01 18:55:48 crc kubenswrapper[4935]: I1201 18:55:48.802257 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="ceilometer-notification-agent" containerID="cri-o://3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9" gracePeriod=30 Dec 01 18:55:49 crc kubenswrapper[4935]: I1201 
18:55:49.053068 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr876"] Dec 01 18:55:49 crc kubenswrapper[4935]: I1201 18:55:49.814647 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nr876" event={"ID":"4b8ef570-0e9e-426b-a8dc-dadd94b78be3","Type":"ContainerStarted","Data":"92f91ddbc366b404833d20b75dfe31428091752d6758e3d7333e46d953b4bab0"} Dec 01 18:55:49 crc kubenswrapper[4935]: I1201 18:55:49.820128 4935 generic.go:334] "Generic (PLEG): container finished" podID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerID="5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a" exitCode=0 Dec 01 18:55:49 crc kubenswrapper[4935]: I1201 18:55:49.820181 4935 generic.go:334] "Generic (PLEG): container finished" podID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerID="eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807" exitCode=2 Dec 01 18:55:49 crc kubenswrapper[4935]: I1201 18:55:49.820175 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerDied","Data":"5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a"} Dec 01 18:55:49 crc kubenswrapper[4935]: I1201 18:55:49.820194 4935 generic.go:334] "Generic (PLEG): container finished" podID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerID="3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9" exitCode=0 Dec 01 18:55:49 crc kubenswrapper[4935]: I1201 18:55:49.820215 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerDied","Data":"eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807"} Dec 01 18:55:49 crc kubenswrapper[4935]: I1201 18:55:49.820225 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerDied","Data":"3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9"} Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.495977 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.605774 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-scripts\") pod \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.605897 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-combined-ca-bundle\") pod \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.605994 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-run-httpd\") pod \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.606016 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-log-httpd\") pod \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.606047 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-config-data\") pod \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.606079 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-sg-core-conf-yaml\") pod \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.606109 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr448\" (UniqueName: \"kubernetes.io/projected/c773f2fd-7296-4d82-8a48-ed7acefcaac0-kube-api-access-pr448\") pod \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\" (UID: \"c773f2fd-7296-4d82-8a48-ed7acefcaac0\") " Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.607563 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c773f2fd-7296-4d82-8a48-ed7acefcaac0" (UID: "c773f2fd-7296-4d82-8a48-ed7acefcaac0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.610769 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c773f2fd-7296-4d82-8a48-ed7acefcaac0" (UID: "c773f2fd-7296-4d82-8a48-ed7acefcaac0"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.612331 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c773f2fd-7296-4d82-8a48-ed7acefcaac0-kube-api-access-pr448" (OuterVolumeSpecName: "kube-api-access-pr448") pod "c773f2fd-7296-4d82-8a48-ed7acefcaac0" (UID: "c773f2fd-7296-4d82-8a48-ed7acefcaac0"). InnerVolumeSpecName "kube-api-access-pr448". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.622345 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-scripts" (OuterVolumeSpecName: "scripts") pod "c773f2fd-7296-4d82-8a48-ed7acefcaac0" (UID: "c773f2fd-7296-4d82-8a48-ed7acefcaac0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.646471 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c773f2fd-7296-4d82-8a48-ed7acefcaac0" (UID: "c773f2fd-7296-4d82-8a48-ed7acefcaac0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.710827 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.710854 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.710864 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c773f2fd-7296-4d82-8a48-ed7acefcaac0-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.710874 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.710884 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr448\" (UniqueName: \"kubernetes.io/projected/c773f2fd-7296-4d82-8a48-ed7acefcaac0-kube-api-access-pr448\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.741790 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c773f2fd-7296-4d82-8a48-ed7acefcaac0" (UID: "c773f2fd-7296-4d82-8a48-ed7acefcaac0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.744426 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-config-data" (OuterVolumeSpecName: "config-data") pod "c773f2fd-7296-4d82-8a48-ed7acefcaac0" (UID: "c773f2fd-7296-4d82-8a48-ed7acefcaac0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.815247 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.815282 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c773f2fd-7296-4d82-8a48-ed7acefcaac0-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.842518 4935 generic.go:334] "Generic (PLEG): container finished" podID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerID="4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18" exitCode=0 Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.842567 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerDied","Data":"4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18"} Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.842578 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.842598 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c773f2fd-7296-4d82-8a48-ed7acefcaac0","Type":"ContainerDied","Data":"3f8d3f9bedddc8acfe9eb8d773b6a3255b63dda484b3ee8f4f1ab8554957feee"} Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.842618 4935 scope.go:117] "RemoveContainer" containerID="5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.874966 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.884228 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.888424 4935 scope.go:117] "RemoveContainer" containerID="eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.917949 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:51 crc kubenswrapper[4935]: E1201 18:55:51.919026 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="sg-core" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919042 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="sg-core" Dec 01 18:55:51 crc kubenswrapper[4935]: E1201 18:55:51.919064 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="proxy-httpd" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919070 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="proxy-httpd" Dec 01 18:55:51 crc kubenswrapper[4935]: E1201 18:55:51.919090 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" containerName="heat-api" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919097 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" 
containerName="heat-api" Dec 01 18:55:51 crc kubenswrapper[4935]: E1201 18:55:51.919109 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="ceilometer-central-agent" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919115 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="ceilometer-central-agent" Dec 01 18:55:51 crc kubenswrapper[4935]: E1201 18:55:51.919159 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="ceilometer-notification-agent" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919165 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="ceilometer-notification-agent" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919388 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7f13e0e-b593-44c8-b015-3bb2d32b896a" containerName="heat-cfnapi" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919408 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="ceilometer-notification-agent" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919421 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="proxy-httpd" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919430 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b1755ee-18df-46ef-be6d-b81b7967d831" containerName="heat-api" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919444 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="sg-core" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.919452 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" containerName="ceilometer-central-agent" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.921563 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.923997 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.924355 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.929745 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:51 crc kubenswrapper[4935]: I1201 18:55:51.987398 4935 scope.go:117] "RemoveContainer" containerID="3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.019489 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2z78\" (UniqueName: \"kubernetes.io/projected/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-kube-api-access-l2z78\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.019775 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-scripts\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.019845 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-log-httpd\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.019871 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.019908 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.019953 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-run-httpd\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.019996 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-config-data\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.040238 4935 scope.go:117] "RemoveContainer" containerID="4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 
18:55:52.104485 4935 scope.go:117] "RemoveContainer" containerID="5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a" Dec 01 18:55:52 crc kubenswrapper[4935]: E1201 18:55:52.105400 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a\": container with ID starting with 5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a not found: ID does not exist" containerID="5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.105446 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a"} err="failed to get container status \"5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a\": rpc error: code = NotFound desc = could not find container \"5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a\": container with ID starting with 5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a not found: ID does not exist" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.105471 4935 scope.go:117] "RemoveContainer" containerID="eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807" Dec 01 18:55:52 crc kubenswrapper[4935]: E1201 18:55:52.105962 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807\": container with ID starting with eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807 not found: ID does not exist" containerID="eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.106007 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807"} err="failed to get container status \"eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807\": rpc error: code = NotFound desc = could not find container \"eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807\": container with ID starting with eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807 not found: ID does not exist" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.106040 4935 scope.go:117] "RemoveContainer" containerID="3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9" Dec 01 18:55:52 crc kubenswrapper[4935]: E1201 18:55:52.106817 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9\": container with ID starting with 3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9 not found: ID does not exist" containerID="3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.106858 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9"} err="failed to get container status \"3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9\": rpc error: code = NotFound desc = could not find container \"3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9\": container with ID 
starting with 3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9 not found: ID does not exist" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.106884 4935 scope.go:117] "RemoveContainer" containerID="4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18" Dec 01 18:55:52 crc kubenswrapper[4935]: E1201 18:55:52.107121 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18\": container with ID starting with 4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18 not found: ID does not exist" containerID="4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.107139 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18"} err="failed to get container status \"4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18\": rpc error: code = NotFound desc = could not find container \"4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18\": container with ID starting with 4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18 not found: ID does not exist" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.121991 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-log-httpd\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.122032 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.122071 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.122120 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-run-httpd\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.122180 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-config-data\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.122223 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2z78\" (UniqueName: \"kubernetes.io/projected/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-kube-api-access-l2z78\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.122266 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-scripts\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.123222 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-log-httpd\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.123832 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-run-httpd\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.129993 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.130225 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-config-data\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.138283 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.146713 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-scripts\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.181236 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2z78\" (UniqueName: \"kubernetes.io/projected/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-kube-api-access-l2z78\") pod \"ceilometer-0\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.255067 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.539421 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c773f2fd-7296-4d82-8a48-ed7acefcaac0" path="/var/lib/kubelet/pods/c773f2fd-7296-4d82-8a48-ed7acefcaac0/volumes" Dec 01 18:55:52 crc kubenswrapper[4935]: I1201 18:55:52.953665 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:53 crc kubenswrapper[4935]: I1201 18:55:53.870654 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerStarted","Data":"c1e82a70e72a741a206b50100f076826a6d8a42882bc24c27bc000e206a8896a"} Dec 01 18:55:53 crc kubenswrapper[4935]: I1201 18:55:53.920038 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:55:54 crc kubenswrapper[4935]: I1201 18:55:54.350645 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 18:55:54 crc kubenswrapper[4935]: I1201 18:55:54.350702 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 18:55:54 crc kubenswrapper[4935]: I1201 18:55:54.350745 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 18:55:54 crc kubenswrapper[4935]: I1201 18:55:54.351642 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 18:55:54 crc kubenswrapper[4935]: I1201 18:55:54.351706 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" gracePeriod=600 Dec 01 18:55:54 crc kubenswrapper[4935]: I1201 18:55:54.894079 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" exitCode=0 Dec 01 18:55:54 crc kubenswrapper[4935]: I1201 18:55:54.894169 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842"} Dec 01 18:55:54 crc kubenswrapper[4935]: I1201 18:55:54.894384 4935 scope.go:117] "RemoveContainer" containerID="8d43b47ae64729f61d960fc5685829c02da961e532465f8f3fc4e3129716002b" Dec 01 18:55:58 crc kubenswrapper[4935]: E1201 18:55:58.541874 4935 log.go:32] "ExecSync cmd from 
runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:55:58 crc kubenswrapper[4935]: E1201 18:55:58.543513 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:55:58 crc kubenswrapper[4935]: E1201 18:55:58.546634 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:55:58 crc kubenswrapper[4935]: E1201 18:55:58.546675 4935 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-7b798c567c-9slfq" podUID="67884ebb-46bf-417e-b499-776f74720d64" containerName="heat-engine" Dec 01 18:56:01 crc kubenswrapper[4935]: E1201 18:56:01.647966 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.021174 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:56:02 crc kubenswrapper[4935]: E1201 18:56:02.021863 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.024034 4935 generic.go:334] "Generic (PLEG): container finished" podID="67884ebb-46bf-417e-b499-776f74720d64" containerID="0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8" exitCode=0 Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.024071 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7b798c567c-9slfq" event={"ID":"67884ebb-46bf-417e-b499-776f74720d64","Type":"ContainerDied","Data":"0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8"} Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.279613 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.398834 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-combined-ca-bundle\") pod \"67884ebb-46bf-417e-b499-776f74720d64\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.398924 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxtmf\" (UniqueName: \"kubernetes.io/projected/67884ebb-46bf-417e-b499-776f74720d64-kube-api-access-sxtmf\") pod \"67884ebb-46bf-417e-b499-776f74720d64\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.398965 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data-custom\") pod \"67884ebb-46bf-417e-b499-776f74720d64\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.399015 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data\") pod \"67884ebb-46bf-417e-b499-776f74720d64\" (UID: \"67884ebb-46bf-417e-b499-776f74720d64\") " Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.404450 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67884ebb-46bf-417e-b499-776f74720d64-kube-api-access-sxtmf" (OuterVolumeSpecName: "kube-api-access-sxtmf") pod "67884ebb-46bf-417e-b499-776f74720d64" (UID: "67884ebb-46bf-417e-b499-776f74720d64"). InnerVolumeSpecName "kube-api-access-sxtmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.405241 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "67884ebb-46bf-417e-b499-776f74720d64" (UID: "67884ebb-46bf-417e-b499-776f74720d64"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.450349 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67884ebb-46bf-417e-b499-776f74720d64" (UID: "67884ebb-46bf-417e-b499-776f74720d64"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.475078 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data" (OuterVolumeSpecName: "config-data") pod "67884ebb-46bf-417e-b499-776f74720d64" (UID: "67884ebb-46bf-417e-b499-776f74720d64"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.502109 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxtmf\" (UniqueName: \"kubernetes.io/projected/67884ebb-46bf-417e-b499-776f74720d64-kube-api-access-sxtmf\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.502168 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.502183 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:02 crc kubenswrapper[4935]: I1201 18:56:02.502193 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67884ebb-46bf-417e-b499-776f74720d64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:03 crc kubenswrapper[4935]: I1201 18:56:03.036127 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerStarted","Data":"f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba"} Dec 01 18:56:03 crc kubenswrapper[4935]: I1201 18:56:03.036433 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerStarted","Data":"801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52"} Dec 01 18:56:03 crc kubenswrapper[4935]: I1201 18:56:03.038876 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nr876" event={"ID":"4b8ef570-0e9e-426b-a8dc-dadd94b78be3","Type":"ContainerStarted","Data":"9b45cf6363caf1545d66a95580099b8f744adba96b4dad41d42b6c531c5b9bcd"} Dec 01 18:56:03 crc kubenswrapper[4935]: I1201 18:56:03.040375 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7b798c567c-9slfq" event={"ID":"67884ebb-46bf-417e-b499-776f74720d64","Type":"ContainerDied","Data":"73f7ac0269667294b97f2112dcc3b20b5329a96fad806ec0981852e419f2b7c4"} Dec 01 18:56:03 crc kubenswrapper[4935]: I1201 18:56:03.040439 4935 scope.go:117] "RemoveContainer" containerID="0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8" Dec 01 18:56:03 crc kubenswrapper[4935]: I1201 18:56:03.040450 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-7b798c567c-9slfq" Dec 01 18:56:03 crc kubenswrapper[4935]: I1201 18:56:03.073702 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-nr876" podStartSLOduration=3.405309302 podStartE2EDuration="16.073684282s" podCreationTimestamp="2025-12-01 18:55:47 +0000 UTC" firstStartedPulling="2025-12-01 18:55:49.077685081 +0000 UTC m=+1563.099314340" lastFinishedPulling="2025-12-01 18:56:01.746060061 +0000 UTC m=+1575.767689320" observedRunningTime="2025-12-01 18:56:03.057974588 +0000 UTC m=+1577.079603847" watchObservedRunningTime="2025-12-01 18:56:03.073684282 +0000 UTC m=+1577.095313541" Dec 01 18:56:03 crc kubenswrapper[4935]: I1201 18:56:03.099921 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-7b798c567c-9slfq"] Dec 01 18:56:03 crc kubenswrapper[4935]: I1201 18:56:03.115400 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-engine-7b798c567c-9slfq"] Dec 01 18:56:04 crc kubenswrapper[4935]: I1201 18:56:04.053936 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerStarted","Data":"06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964"} Dec 01 18:56:04 crc kubenswrapper[4935]: I1201 18:56:04.524972 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67884ebb-46bf-417e-b499-776f74720d64" path="/var/lib/kubelet/pods/67884ebb-46bf-417e-b499-776f74720d64/volumes" Dec 01 18:56:06 crc kubenswrapper[4935]: I1201 18:56:06.074910 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerStarted","Data":"3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2"} Dec 01 18:56:06 crc kubenswrapper[4935]: I1201 18:56:06.075467 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:56:06 crc kubenswrapper[4935]: I1201 18:56:06.075121 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="sg-core" containerID="cri-o://06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964" gracePeriod=30 Dec 01 18:56:06 crc kubenswrapper[4935]: I1201 18:56:06.075108 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="proxy-httpd" containerID="cri-o://3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2" gracePeriod=30 Dec 01 18:56:06 crc kubenswrapper[4935]: I1201 18:56:06.075207 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="ceilometer-notification-agent" containerID="cri-o://f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba" gracePeriod=30 Dec 01 18:56:06 crc kubenswrapper[4935]: I1201 18:56:06.075057 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="ceilometer-central-agent" containerID="cri-o://801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52" gracePeriod=30 Dec 01 18:56:06 crc kubenswrapper[4935]: I1201 18:56:06.114732 4935 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.847496635 podStartE2EDuration="15.114709698s" podCreationTimestamp="2025-12-01 18:55:51 +0000 UTC" firstStartedPulling="2025-12-01 18:55:52.980013069 +0000 UTC m=+1567.001642328" lastFinishedPulling="2025-12-01 18:56:05.247226092 +0000 UTC m=+1579.268855391" observedRunningTime="2025-12-01 18:56:06.104036932 +0000 UTC m=+1580.125666181" watchObservedRunningTime="2025-12-01 18:56:06.114709698 +0000 UTC m=+1580.136338957" Dec 01 18:56:07 crc kubenswrapper[4935]: I1201 18:56:07.092568 4935 generic.go:334] "Generic (PLEG): container finished" podID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerID="3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2" exitCode=0 Dec 01 18:56:07 crc kubenswrapper[4935]: I1201 18:56:07.092912 4935 generic.go:334] "Generic (PLEG): container finished" podID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerID="06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964" exitCode=2 Dec 01 18:56:07 crc kubenswrapper[4935]: I1201 18:56:07.092667 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerDied","Data":"3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2"} Dec 01 18:56:07 crc kubenswrapper[4935]: I1201 18:56:07.092972 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerDied","Data":"06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964"} Dec 01 18:56:07 crc kubenswrapper[4935]: I1201 18:56:07.092992 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerDied","Data":"f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba"} Dec 01 18:56:07 crc kubenswrapper[4935]: I1201 18:56:07.092929 4935 generic.go:334] "Generic (PLEG): container finished" podID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerID="f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba" exitCode=0 Dec 01 18:56:08 crc kubenswrapper[4935]: I1201 18:56:08.926317 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vnmwl"] Dec 01 18:56:08 crc kubenswrapper[4935]: E1201 18:56:08.927316 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67884ebb-46bf-417e-b499-776f74720d64" containerName="heat-engine" Dec 01 18:56:08 crc kubenswrapper[4935]: I1201 18:56:08.927332 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="67884ebb-46bf-417e-b499-776f74720d64" containerName="heat-engine" Dec 01 18:56:08 crc kubenswrapper[4935]: I1201 18:56:08.927590 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="67884ebb-46bf-417e-b499-776f74720d64" containerName="heat-engine" Dec 01 18:56:08 crc kubenswrapper[4935]: I1201 18:56:08.929295 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:08 crc kubenswrapper[4935]: I1201 18:56:08.941793 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vnmwl"] Dec 01 18:56:08 crc kubenswrapper[4935]: I1201 18:56:08.966361 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-utilities\") pod \"community-operators-vnmwl\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:08 crc kubenswrapper[4935]: I1201 18:56:08.966504 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-catalog-content\") pod \"community-operators-vnmwl\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:08 crc kubenswrapper[4935]: I1201 18:56:08.966589 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw4h6\" (UniqueName: \"kubernetes.io/projected/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-kube-api-access-cw4h6\") pod \"community-operators-vnmwl\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:09 crc kubenswrapper[4935]: I1201 18:56:09.067978 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw4h6\" (UniqueName: \"kubernetes.io/projected/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-kube-api-access-cw4h6\") pod \"community-operators-vnmwl\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:09 crc kubenswrapper[4935]: I1201 18:56:09.068079 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-utilities\") pod \"community-operators-vnmwl\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:09 crc kubenswrapper[4935]: I1201 18:56:09.068254 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-catalog-content\") pod \"community-operators-vnmwl\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:09 crc kubenswrapper[4935]: I1201 18:56:09.068573 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-utilities\") pod \"community-operators-vnmwl\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:09 crc kubenswrapper[4935]: I1201 18:56:09.068736 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-catalog-content\") pod \"community-operators-vnmwl\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:09 crc kubenswrapper[4935]: I1201 18:56:09.087935 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cw4h6\" (UniqueName: \"kubernetes.io/projected/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-kube-api-access-cw4h6\") pod \"community-operators-vnmwl\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:09 crc kubenswrapper[4935]: I1201 18:56:09.255715 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:09 crc kubenswrapper[4935]: I1201 18:56:09.764630 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vnmwl"] Dec 01 18:56:10 crc kubenswrapper[4935]: I1201 18:56:10.127508 4935 generic.go:334] "Generic (PLEG): container finished" podID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerID="8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef" exitCode=0 Dec 01 18:56:10 crc kubenswrapper[4935]: I1201 18:56:10.127658 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vnmwl" event={"ID":"5466cd9c-f886-4bb7-83c0-2c7a24e339f5","Type":"ContainerDied","Data":"8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef"} Dec 01 18:56:10 crc kubenswrapper[4935]: I1201 18:56:10.128642 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vnmwl" event={"ID":"5466cd9c-f886-4bb7-83c0-2c7a24e339f5","Type":"ContainerStarted","Data":"31f54f934e383cd71528c251f15005fbb5f285bbca8a9f93f11577102223d3fe"} Dec 01 18:56:12 crc kubenswrapper[4935]: I1201 18:56:12.151405 4935 generic.go:334] "Generic (PLEG): container finished" podID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerID="a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d" exitCode=0 Dec 01 18:56:12 crc kubenswrapper[4935]: I1201 18:56:12.151462 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vnmwl" event={"ID":"5466cd9c-f886-4bb7-83c0-2c7a24e339f5","Type":"ContainerDied","Data":"a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d"} Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.124214 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.176853 4935 generic.go:334] "Generic (PLEG): container finished" podID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerID="801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52" exitCode=0 Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.176929 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.176950 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerDied","Data":"801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52"} Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.176987 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2","Type":"ContainerDied","Data":"c1e82a70e72a741a206b50100f076826a6d8a42882bc24c27bc000e206a8896a"} Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.177010 4935 scope.go:117] "RemoveContainer" containerID="3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.183766 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vnmwl" event={"ID":"5466cd9c-f886-4bb7-83c0-2c7a24e339f5","Type":"ContainerStarted","Data":"6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89"} Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.204571 4935 scope.go:117] "RemoveContainer" containerID="06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.220584 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vnmwl" podStartSLOduration=2.798524974 podStartE2EDuration="5.220560161s" podCreationTimestamp="2025-12-01 18:56:08 +0000 UTC" firstStartedPulling="2025-12-01 18:56:10.12951701 +0000 UTC m=+1584.151146269" lastFinishedPulling="2025-12-01 18:56:12.551552187 +0000 UTC m=+1586.573181456" observedRunningTime="2025-12-01 18:56:13.208727178 +0000 UTC m=+1587.230356437" watchObservedRunningTime="2025-12-01 18:56:13.220560161 +0000 UTC m=+1587.242189420" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.247260 4935 scope.go:117] "RemoveContainer" containerID="f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.272428 4935 scope.go:117] "RemoveContainer" containerID="801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.281602 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-run-httpd\") pod \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.281750 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-scripts\") pod \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.281843 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-log-httpd\") pod \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.281925 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-sg-core-conf-yaml\") pod \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.282158 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" (UID: "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.282341 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" (UID: "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.282688 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-combined-ca-bundle\") pod \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.282811 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-config-data\") pod \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.282882 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2z78\" (UniqueName: \"kubernetes.io/projected/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-kube-api-access-l2z78\") pod \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\" (UID: \"e06bff0e-d3dc-42fe-a6f0-83d08a36fad2\") " Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.283924 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.283958 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.294603 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-scripts" (OuterVolumeSpecName: "scripts") pod "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" (UID: "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.294695 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-kube-api-access-l2z78" (OuterVolumeSpecName: "kube-api-access-l2z78") pod "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" (UID: "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2"). InnerVolumeSpecName "kube-api-access-l2z78". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.316897 4935 scope.go:117] "RemoveContainer" containerID="3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2" Dec 01 18:56:13 crc kubenswrapper[4935]: E1201 18:56:13.317429 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2\": container with ID starting with 3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2 not found: ID does not exist" containerID="3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.317476 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2"} err="failed to get container status \"3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2\": rpc error: code = NotFound desc = could not find container \"3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2\": container with ID starting with 3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2 not found: ID does not exist" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.317506 4935 scope.go:117] "RemoveContainer" containerID="06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964" Dec 01 18:56:13 crc kubenswrapper[4935]: E1201 18:56:13.318008 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964\": container with ID starting with 06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964 not found: ID does not exist" containerID="06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.318045 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964"} err="failed to get container status \"06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964\": rpc error: code = NotFound desc = could not find container \"06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964\": container with ID starting with 06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964 not found: ID does not exist" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.318063 4935 scope.go:117] "RemoveContainer" containerID="f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba" Dec 01 18:56:13 crc kubenswrapper[4935]: E1201 18:56:13.318376 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba\": container with ID starting with f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba not found: ID does not exist" containerID="f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.318420 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba"} err="failed to get container status \"f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba\": rpc error: code = NotFound desc = could not 
find container \"f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba\": container with ID starting with f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba not found: ID does not exist" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.318450 4935 scope.go:117] "RemoveContainer" containerID="801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52" Dec 01 18:56:13 crc kubenswrapper[4935]: E1201 18:56:13.318727 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52\": container with ID starting with 801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52 not found: ID does not exist" containerID="801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.318763 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52"} err="failed to get container status \"801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52\": rpc error: code = NotFound desc = could not find container \"801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52\": container with ID starting with 801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52 not found: ID does not exist" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.320093 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" (UID: "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.386355 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2z78\" (UniqueName: \"kubernetes.io/projected/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-kube-api-access-l2z78\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.386392 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.386407 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.390015 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" (UID: "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.418040 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-config-data" (OuterVolumeSpecName: "config-data") pod "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" (UID: "e06bff0e-d3dc-42fe-a6f0-83d08a36fad2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.489388 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.489423 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.512279 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.520316 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.545448 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:13 crc kubenswrapper[4935]: E1201 18:56:13.545891 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="ceilometer-central-agent" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.545908 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="ceilometer-central-agent" Dec 01 18:56:13 crc kubenswrapper[4935]: E1201 18:56:13.545931 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="proxy-httpd" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.545937 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="proxy-httpd" Dec 01 18:56:13 crc kubenswrapper[4935]: E1201 18:56:13.545949 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="sg-core" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.545956 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="sg-core" Dec 01 18:56:13 crc kubenswrapper[4935]: E1201 18:56:13.545973 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="ceilometer-notification-agent" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.545979 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="ceilometer-notification-agent" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.546199 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="ceilometer-notification-agent" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.546221 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="proxy-httpd" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.546233 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="sg-core" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.546244 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" containerName="ceilometer-central-agent" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.548191 4935 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.550555 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.554647 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.570163 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.693372 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-config-data\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.693739 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-log-httpd\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.694640 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-run-httpd\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.694729 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnrmm\" (UniqueName: \"kubernetes.io/projected/cf37e365-636d-43ea-8479-e5e5fdebe49b-kube-api-access-fnrmm\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.694801 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-scripts\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.694868 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.694922 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.797163 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-run-httpd\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 
18:56:13.797220 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnrmm\" (UniqueName: \"kubernetes.io/projected/cf37e365-636d-43ea-8479-e5e5fdebe49b-kube-api-access-fnrmm\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.797252 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-scripts\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.797284 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.797314 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.797344 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-config-data\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.797434 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-log-httpd\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.797629 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-run-httpd\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.797837 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-log-httpd\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.804078 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-scripts\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.805875 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.809602 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.824174 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-config-data\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.848237 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnrmm\" (UniqueName: \"kubernetes.io/projected/cf37e365-636d-43ea-8479-e5e5fdebe49b-kube-api-access-fnrmm\") pod \"ceilometer-0\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " pod="openstack/ceilometer-0" Dec 01 18:56:13 crc kubenswrapper[4935]: I1201 18:56:13.870619 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:14 crc kubenswrapper[4935]: I1201 18:56:14.202413 4935 generic.go:334] "Generic (PLEG): container finished" podID="4b8ef570-0e9e-426b-a8dc-dadd94b78be3" containerID="9b45cf6363caf1545d66a95580099b8f744adba96b4dad41d42b6c531c5b9bcd" exitCode=0 Dec 01 18:56:14 crc kubenswrapper[4935]: I1201 18:56:14.203583 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nr876" event={"ID":"4b8ef570-0e9e-426b-a8dc-dadd94b78be3","Type":"ContainerDied","Data":"9b45cf6363caf1545d66a95580099b8f744adba96b4dad41d42b6c531c5b9bcd"} Dec 01 18:56:14 crc kubenswrapper[4935]: I1201 18:56:14.447658 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:14 crc kubenswrapper[4935]: I1201 18:56:14.508087 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:56:14 crc kubenswrapper[4935]: E1201 18:56:14.508373 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:56:14 crc kubenswrapper[4935]: I1201 18:56:14.519523 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e06bff0e-d3dc-42fe-a6f0-83d08a36fad2" path="/var/lib/kubelet/pods/e06bff0e-d3dc-42fe-a6f0-83d08a36fad2/volumes" Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.219114 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerStarted","Data":"da78eb1ca6f5a2853e14a37657a306d2ff864814a754891c9ac30f48bed97f96"} Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.553029 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.643230 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-scripts\") pod \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.643613 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-config-data\") pod \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.643683 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-combined-ca-bundle\") pod \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.643873 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5g66\" (UniqueName: \"kubernetes.io/projected/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-kube-api-access-w5g66\") pod \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\" (UID: \"4b8ef570-0e9e-426b-a8dc-dadd94b78be3\") " Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.647814 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-scripts" (OuterVolumeSpecName: "scripts") pod "4b8ef570-0e9e-426b-a8dc-dadd94b78be3" (UID: "4b8ef570-0e9e-426b-a8dc-dadd94b78be3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.657537 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-kube-api-access-w5g66" (OuterVolumeSpecName: "kube-api-access-w5g66") pod "4b8ef570-0e9e-426b-a8dc-dadd94b78be3" (UID: "4b8ef570-0e9e-426b-a8dc-dadd94b78be3"). InnerVolumeSpecName "kube-api-access-w5g66". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.676292 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b8ef570-0e9e-426b-a8dc-dadd94b78be3" (UID: "4b8ef570-0e9e-426b-a8dc-dadd94b78be3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.678617 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-config-data" (OuterVolumeSpecName: "config-data") pod "4b8ef570-0e9e-426b-a8dc-dadd94b78be3" (UID: "4b8ef570-0e9e-426b-a8dc-dadd94b78be3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.746518 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.746555 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.746570 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:15 crc kubenswrapper[4935]: I1201 18:56:15.746585 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5g66\" (UniqueName: \"kubernetes.io/projected/4b8ef570-0e9e-426b-a8dc-dadd94b78be3-kube-api-access-w5g66\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.230475 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerStarted","Data":"f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531"} Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.233082 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nr876" event={"ID":"4b8ef570-0e9e-426b-a8dc-dadd94b78be3","Type":"ContainerDied","Data":"92f91ddbc366b404833d20b75dfe31428091752d6758e3d7333e46d953b4bab0"} Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.233134 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92f91ddbc366b404833d20b75dfe31428091752d6758e3d7333e46d953b4bab0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.233199 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nr876" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.350390 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 18:56:16 crc kubenswrapper[4935]: E1201 18:56:16.350849 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8ef570-0e9e-426b-a8dc-dadd94b78be3" containerName="nova-cell0-conductor-db-sync" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.350867 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8ef570-0e9e-426b-a8dc-dadd94b78be3" containerName="nova-cell0-conductor-db-sync" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.351104 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8ef570-0e9e-426b-a8dc-dadd94b78be3" containerName="nova-cell0-conductor-db-sync" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.352039 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.360679 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.362799 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-bmk65" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.373595 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.462242 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af39aa76-e3fe-4e4d-9d7a-75c43da4f301-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"af39aa76-e3fe-4e4d-9d7a-75c43da4f301\") " pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.462651 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af39aa76-e3fe-4e4d-9d7a-75c43da4f301-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"af39aa76-e3fe-4e4d-9d7a-75c43da4f301\") " pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.463101 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prmhx\" (UniqueName: \"kubernetes.io/projected/af39aa76-e3fe-4e4d-9d7a-75c43da4f301-kube-api-access-prmhx\") pod \"nova-cell0-conductor-0\" (UID: \"af39aa76-e3fe-4e4d-9d7a-75c43da4f301\") " pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.565036 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af39aa76-e3fe-4e4d-9d7a-75c43da4f301-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"af39aa76-e3fe-4e4d-9d7a-75c43da4f301\") " pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.565098 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prmhx\" (UniqueName: \"kubernetes.io/projected/af39aa76-e3fe-4e4d-9d7a-75c43da4f301-kube-api-access-prmhx\") pod \"nova-cell0-conductor-0\" (UID: \"af39aa76-e3fe-4e4d-9d7a-75c43da4f301\") " pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.565300 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af39aa76-e3fe-4e4d-9d7a-75c43da4f301-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"af39aa76-e3fe-4e4d-9d7a-75c43da4f301\") " pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.569817 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af39aa76-e3fe-4e4d-9d7a-75c43da4f301-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"af39aa76-e3fe-4e4d-9d7a-75c43da4f301\") " pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.569900 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af39aa76-e3fe-4e4d-9d7a-75c43da4f301-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"af39aa76-e3fe-4e4d-9d7a-75c43da4f301\") " pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.584592 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prmhx\" (UniqueName: \"kubernetes.io/projected/af39aa76-e3fe-4e4d-9d7a-75c43da4f301-kube-api-access-prmhx\") pod \"nova-cell0-conductor-0\" (UID: \"af39aa76-e3fe-4e4d-9d7a-75c43da4f301\") " pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:16 crc kubenswrapper[4935]: I1201 18:56:16.674984 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:17 crc kubenswrapper[4935]: I1201 18:56:17.146338 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 18:56:17 crc kubenswrapper[4935]: I1201 18:56:17.253771 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerStarted","Data":"ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a"} Dec 01 18:56:17 crc kubenswrapper[4935]: I1201 18:56:17.254635 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"af39aa76-e3fe-4e4d-9d7a-75c43da4f301","Type":"ContainerStarted","Data":"3dc7b24fc22127bd65878abf8f5e52e30de9b1441e8bd0d4f8570381adc5c125"} Dec 01 18:56:18 crc kubenswrapper[4935]: I1201 18:56:18.268489 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"af39aa76-e3fe-4e4d-9d7a-75c43da4f301","Type":"ContainerStarted","Data":"92b5c0545186c38e858383791f4c76f995709d6857d87404f306c8527d51f6ef"} Dec 01 18:56:18 crc kubenswrapper[4935]: I1201 18:56:18.268974 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:18 crc kubenswrapper[4935]: I1201 18:56:18.272295 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerStarted","Data":"90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e"} Dec 01 18:56:18 crc kubenswrapper[4935]: I1201 18:56:18.287036 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.287014841 podStartE2EDuration="2.287014841s" podCreationTimestamp="2025-12-01 18:56:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:18.284981677 +0000 UTC m=+1592.306610936" watchObservedRunningTime="2025-12-01 18:56:18.287014841 +0000 UTC m=+1592.308644100" Dec 01 18:56:19 crc kubenswrapper[4935]: I1201 18:56:19.256177 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:19 crc kubenswrapper[4935]: I1201 18:56:19.256651 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:19 crc kubenswrapper[4935]: I1201 18:56:19.288885 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerStarted","Data":"eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465"} Dec 01 18:56:19 crc kubenswrapper[4935]: I1201 18:56:19.313130 4935 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.058233739 podStartE2EDuration="6.313104914s" podCreationTimestamp="2025-12-01 18:56:13 +0000 UTC" firstStartedPulling="2025-12-01 18:56:14.451092084 +0000 UTC m=+1588.472721333" lastFinishedPulling="2025-12-01 18:56:18.705963249 +0000 UTC m=+1592.727592508" observedRunningTime="2025-12-01 18:56:19.306429424 +0000 UTC m=+1593.328058683" watchObservedRunningTime="2025-12-01 18:56:19.313104914 +0000 UTC m=+1593.334734193" Dec 01 18:56:19 crc kubenswrapper[4935]: I1201 18:56:19.321999 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:19 crc kubenswrapper[4935]: I1201 18:56:19.394136 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:19 crc kubenswrapper[4935]: I1201 18:56:19.578429 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vnmwl"] Dec 01 18:56:20 crc kubenswrapper[4935]: I1201 18:56:20.302082 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:56:21 crc kubenswrapper[4935]: I1201 18:56:21.320569 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vnmwl" podUID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerName="registry-server" containerID="cri-o://6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89" gracePeriod=2 Dec 01 18:56:21 crc kubenswrapper[4935]: I1201 18:56:21.929420 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.017981 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-catalog-content\") pod \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.018238 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw4h6\" (UniqueName: \"kubernetes.io/projected/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-kube-api-access-cw4h6\") pod \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.018353 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-utilities\") pod \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\" (UID: \"5466cd9c-f886-4bb7-83c0-2c7a24e339f5\") " Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.019698 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-utilities" (OuterVolumeSpecName: "utilities") pod "5466cd9c-f886-4bb7-83c0-2c7a24e339f5" (UID: "5466cd9c-f886-4bb7-83c0-2c7a24e339f5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.031560 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-kube-api-access-cw4h6" (OuterVolumeSpecName: "kube-api-access-cw4h6") pod "5466cd9c-f886-4bb7-83c0-2c7a24e339f5" (UID: "5466cd9c-f886-4bb7-83c0-2c7a24e339f5"). InnerVolumeSpecName "kube-api-access-cw4h6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.086997 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5466cd9c-f886-4bb7-83c0-2c7a24e339f5" (UID: "5466cd9c-f886-4bb7-83c0-2c7a24e339f5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.121555 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw4h6\" (UniqueName: \"kubernetes.io/projected/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-kube-api-access-cw4h6\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.121592 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.121602 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5466cd9c-f886-4bb7-83c0-2c7a24e339f5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.331690 4935 generic.go:334] "Generic (PLEG): container finished" podID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerID="6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89" exitCode=0 Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.331733 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vnmwl" event={"ID":"5466cd9c-f886-4bb7-83c0-2c7a24e339f5","Type":"ContainerDied","Data":"6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89"} Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.331762 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vnmwl" event={"ID":"5466cd9c-f886-4bb7-83c0-2c7a24e339f5","Type":"ContainerDied","Data":"31f54f934e383cd71528c251f15005fbb5f285bbca8a9f93f11577102223d3fe"} Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.331780 4935 scope.go:117] "RemoveContainer" containerID="6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.331781 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vnmwl" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.377013 4935 scope.go:117] "RemoveContainer" containerID="a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.386247 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vnmwl"] Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.398025 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vnmwl"] Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.409360 4935 scope.go:117] "RemoveContainer" containerID="8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.478756 4935 scope.go:117] "RemoveContainer" containerID="6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89" Dec 01 18:56:22 crc kubenswrapper[4935]: E1201 18:56:22.480395 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89\": container with ID starting with 6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89 not found: ID does not exist" containerID="6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.480422 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89"} err="failed to get container status \"6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89\": rpc error: code = NotFound desc = could not find container \"6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89\": container with ID starting with 6b25caf61d47c0a9c87a67368dcda0f5583508ea3e2cff4f99d24b413699ae89 not found: ID does not exist" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.480444 4935 scope.go:117] "RemoveContainer" containerID="a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d" Dec 01 18:56:22 crc kubenswrapper[4935]: E1201 18:56:22.480752 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d\": container with ID starting with a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d not found: ID does not exist" containerID="a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.480776 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d"} err="failed to get container status \"a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d\": rpc error: code = NotFound desc = could not find container \"a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d\": container with ID starting with a28f38167f2274ce7b5acc2a874c758655890ae3d6281a5c788180e885670b1d not found: ID does not exist" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.480788 4935 scope.go:117] "RemoveContainer" containerID="8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef" Dec 01 18:56:22 crc kubenswrapper[4935]: E1201 18:56:22.480996 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef\": container with ID starting with 8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef not found: ID does not exist" containerID="8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.481022 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef"} err="failed to get container status \"8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef\": rpc error: code = NotFound desc = could not find container \"8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef\": container with ID starting with 8ccabbc4ad2e0720ff9ea7fe650ef01eaecdbd385fcaa1a087d423d0def703ef not found: ID does not exist" Dec 01 18:56:22 crc kubenswrapper[4935]: I1201 18:56:22.519062 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" path="/var/lib/kubelet/pods/5466cd9c-f886-4bb7-83c0-2c7a24e339f5/volumes" Dec 01 18:56:24 crc kubenswrapper[4935]: I1201 18:56:24.881857 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:24 crc kubenswrapper[4935]: I1201 18:56:24.882464 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="ceilometer-central-agent" containerID="cri-o://f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531" gracePeriod=30 Dec 01 18:56:24 crc kubenswrapper[4935]: I1201 18:56:24.882581 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="ceilometer-notification-agent" containerID="cri-o://ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a" gracePeriod=30 Dec 01 18:56:24 crc kubenswrapper[4935]: I1201 18:56:24.882605 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="sg-core" containerID="cri-o://90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e" gracePeriod=30 Dec 01 18:56:24 crc kubenswrapper[4935]: I1201 18:56:24.882533 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="proxy-httpd" containerID="cri-o://eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465" gracePeriod=30 Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.367527 4935 generic.go:334] "Generic (PLEG): container finished" podID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerID="eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465" exitCode=0 Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.367569 4935 generic.go:334] "Generic (PLEG): container finished" podID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerID="90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e" exitCode=2 Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.367581 4935 generic.go:334] "Generic (PLEG): container finished" podID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerID="f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531" exitCode=0 Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 
18:56:25.367603 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerDied","Data":"eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465"} Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.367645 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerDied","Data":"90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e"} Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.367659 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerDied","Data":"f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531"} Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.720406 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.901969 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-log-httpd\") pod \"cf37e365-636d-43ea-8479-e5e5fdebe49b\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.902063 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-config-data\") pod \"cf37e365-636d-43ea-8479-e5e5fdebe49b\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.902139 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-scripts\") pod \"cf37e365-636d-43ea-8479-e5e5fdebe49b\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.902215 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-run-httpd\") pod \"cf37e365-636d-43ea-8479-e5e5fdebe49b\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.902445 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-sg-core-conf-yaml\") pod \"cf37e365-636d-43ea-8479-e5e5fdebe49b\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.902511 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnrmm\" (UniqueName: \"kubernetes.io/projected/cf37e365-636d-43ea-8479-e5e5fdebe49b-kube-api-access-fnrmm\") pod \"cf37e365-636d-43ea-8479-e5e5fdebe49b\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.902509 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cf37e365-636d-43ea-8479-e5e5fdebe49b" (UID: "cf37e365-636d-43ea-8479-e5e5fdebe49b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.902580 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-combined-ca-bundle\") pod \"cf37e365-636d-43ea-8479-e5e5fdebe49b\" (UID: \"cf37e365-636d-43ea-8479-e5e5fdebe49b\") " Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.903023 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cf37e365-636d-43ea-8479-e5e5fdebe49b" (UID: "cf37e365-636d-43ea-8479-e5e5fdebe49b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.903416 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.903451 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37e365-636d-43ea-8479-e5e5fdebe49b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.907936 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf37e365-636d-43ea-8479-e5e5fdebe49b-kube-api-access-fnrmm" (OuterVolumeSpecName: "kube-api-access-fnrmm") pod "cf37e365-636d-43ea-8479-e5e5fdebe49b" (UID: "cf37e365-636d-43ea-8479-e5e5fdebe49b"). InnerVolumeSpecName "kube-api-access-fnrmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.908787 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-scripts" (OuterVolumeSpecName: "scripts") pod "cf37e365-636d-43ea-8479-e5e5fdebe49b" (UID: "cf37e365-636d-43ea-8479-e5e5fdebe49b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:25 crc kubenswrapper[4935]: I1201 18:56:25.957919 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cf37e365-636d-43ea-8479-e5e5fdebe49b" (UID: "cf37e365-636d-43ea-8479-e5e5fdebe49b"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.009602 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.009635 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.009647 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnrmm\" (UniqueName: \"kubernetes.io/projected/cf37e365-636d-43ea-8479-e5e5fdebe49b-kube-api-access-fnrmm\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.020082 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf37e365-636d-43ea-8479-e5e5fdebe49b" (UID: "cf37e365-636d-43ea-8479-e5e5fdebe49b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.064336 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-config-data" (OuterVolumeSpecName: "config-data") pod "cf37e365-636d-43ea-8479-e5e5fdebe49b" (UID: "cf37e365-636d-43ea-8479-e5e5fdebe49b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.112016 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.112046 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf37e365-636d-43ea-8479-e5e5fdebe49b-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.381827 4935 generic.go:334] "Generic (PLEG): container finished" podID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerID="ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a" exitCode=0 Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.381884 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerDied","Data":"ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a"} Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.381896 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.381927 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37e365-636d-43ea-8479-e5e5fdebe49b","Type":"ContainerDied","Data":"da78eb1ca6f5a2853e14a37657a306d2ff864814a754891c9ac30f48bed97f96"} Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.381949 4935 scope.go:117] "RemoveContainer" containerID="eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.441091 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.458306 4935 scope.go:117] "RemoveContainer" containerID="90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.464837 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.478828 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.479864 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="ceilometer-notification-agent" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.480017 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="ceilometer-notification-agent" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.480098 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="proxy-httpd" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.480193 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="proxy-httpd" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.480293 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="ceilometer-central-agent" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.480375 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="ceilometer-central-agent" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.480470 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerName="registry-server" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.480533 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerName="registry-server" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.480605 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerName="extract-content" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.480670 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerName="extract-content" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.480746 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="sg-core" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.480807 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="sg-core" 
Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.480875 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerName="extract-utilities" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.480929 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerName="extract-utilities" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.481303 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="sg-core" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.481401 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="5466cd9c-f886-4bb7-83c0-2c7a24e339f5" containerName="registry-server" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.481481 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="ceilometer-central-agent" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.481550 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="ceilometer-notification-agent" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.481626 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" containerName="proxy-httpd" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.484478 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.489540 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.503820 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.525859 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf37e365-636d-43ea-8479-e5e5fdebe49b" path="/var/lib/kubelet/pods/cf37e365-636d-43ea-8479-e5e5fdebe49b/volumes" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.526715 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.554110 4935 scope.go:117] "RemoveContainer" containerID="ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.578427 4935 scope.go:117] "RemoveContainer" containerID="f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.607373 4935 scope.go:117] "RemoveContainer" containerID="eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.607760 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465\": container with ID starting with eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465 not found: ID does not exist" containerID="eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.607796 4935 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465"} err="failed to get container status \"eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465\": rpc error: code = NotFound desc = could not find container \"eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465\": container with ID starting with eb85e12cacea9f162265f59518cb61846163e7f410a2ff3293b1c21b4aefe465 not found: ID does not exist" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.607819 4935 scope.go:117] "RemoveContainer" containerID="90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.609494 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e\": container with ID starting with 90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e not found: ID does not exist" containerID="90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.609523 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e"} err="failed to get container status \"90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e\": rpc error: code = NotFound desc = could not find container \"90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e\": container with ID starting with 90cc05ddd7494f7ec59be6dabc7c6e5e864c5f96efc20d4782a7d0ef130ae40e not found: ID does not exist" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.609540 4935 scope.go:117] "RemoveContainer" containerID="ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.610038 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a\": container with ID starting with ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a not found: ID does not exist" containerID="ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.610061 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a"} err="failed to get container status \"ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a\": rpc error: code = NotFound desc = could not find container \"ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a\": container with ID starting with ba6f5ed13f9f65c0d163a78224a1e73b2939b0e697a155ed40265e5e4794037a not found: ID does not exist" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.610073 4935 scope.go:117] "RemoveContainer" containerID="f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.610444 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531\": container with ID starting with f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531 not found: ID does not exist" 
containerID="f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.610466 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531"} err="failed to get container status \"f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531\": rpc error: code = NotFound desc = could not find container \"f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531\": container with ID starting with f3161deb7ff875fbebb478b2ead89e0c321555814e4313da96e8f9b47292d531 not found: ID does not exist" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.633296 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.633338 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-config-data\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.633390 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-scripts\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.633437 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gvzl\" (UniqueName: \"kubernetes.io/projected/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-kube-api-access-5gvzl\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.633462 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.633543 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-log-httpd\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.633610 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-run-httpd\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.711277 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.736001 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-log-httpd\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.736091 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-run-httpd\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.736135 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.736169 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-config-data\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.736213 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-scripts\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.736258 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gvzl\" (UniqueName: \"kubernetes.io/projected/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-kube-api-access-5gvzl\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.736282 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.736550 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-log-httpd\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.737311 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-run-httpd\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.743515 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.743717 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-scripts\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.744369 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-config-data\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.747654 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.754804 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gvzl\" (UniqueName: \"kubernetes.io/projected/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-kube-api-access-5gvzl\") pod \"ceilometer-0\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: I1201 18:56:26.826708 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:26 crc kubenswrapper[4935]: E1201 18:56:26.958639 4935 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/4189d269e7932ee5e13be893baa6734253c40fa945b4bd902ff7a97951928bd5/diff" to get inode usage: stat /var/lib/containers/storage/overlay/4189d269e7932ee5e13be893baa6734253c40fa945b4bd902ff7a97951928bd5/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_heat-engine-7b798c567c-9slfq_67884ebb-46bf-417e-b499-776f74720d64/heat-engine/0.log" to get inode usage: stat /var/log/pods/openstack_heat-engine-7b798c567c-9slfq_67884ebb-46bf-417e-b499-776f74720d64/heat-engine/0.log: no such file or directory Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.307626 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.394795 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerStarted","Data":"47fd15aa92d87ee06fd247afe04402557848624f9bd471b676f364ce0897c330"} Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.409292 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.487294 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-hfpj6"] Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.489702 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.492271 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.492834 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.505282 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-hfpj6"] Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.508772 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:56:27 crc kubenswrapper[4935]: E1201 18:56:27.509049 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.556719 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-config-data\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.556784 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q66k2\" (UniqueName: \"kubernetes.io/projected/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-kube-api-access-q66k2\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.557008 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-scripts\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.557468 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.659404 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-config-data\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.659457 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q66k2\" (UniqueName: \"kubernetes.io/projected/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-kube-api-access-q66k2\") pod 
\"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.659504 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-scripts\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.659613 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.666680 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-config-data\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.674839 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.681663 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-scripts\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.719834 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q66k2\" (UniqueName: \"kubernetes.io/projected/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-kube-api-access-q66k2\") pod \"nova-cell0-cell-mapping-hfpj6\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.805604 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.811290 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.816518 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.831414 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.849787 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.873279 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-logs\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.873337 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.873387 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdjl8\" (UniqueName: \"kubernetes.io/projected/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-kube-api-access-hdjl8\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.873414 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-config-data\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.945309 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-cwgjz"] Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.946837 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.978511 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-logs\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.978574 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.978633 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdjl8\" (UniqueName: \"kubernetes.io/projected/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-kube-api-access-hdjl8\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.978665 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-config-data\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.983414 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-logs\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:27 crc kubenswrapper[4935]: I1201 18:56:27.998351 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-config-data\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.006817 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.030993 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-cwgjz"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.041497 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.043238 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.054512 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.073983 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdjl8\" (UniqueName: \"kubernetes.io/projected/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-kube-api-access-hdjl8\") pod \"nova-api-0\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " pod="openstack/nova-api-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.083171 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3d074f6-a222-4e33-980c-3031ca1ad334-operator-scripts\") pod \"aodh-db-create-cwgjz\" (UID: \"e3d074f6-a222-4e33-980c-3031ca1ad334\") " pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.083285 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klqs7\" (UniqueName: \"kubernetes.io/projected/e3d074f6-a222-4e33-980c-3031ca1ad334-kube-api-access-klqs7\") pod \"aodh-db-create-cwgjz\" (UID: \"e3d074f6-a222-4e33-980c-3031ca1ad334\") " pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.102976 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.104999 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.108565 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.152061 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.170528 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.196824 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.196863 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klqs7\" (UniqueName: \"kubernetes.io/projected/e3d074f6-a222-4e33-980c-3031ca1ad334-kube-api-access-klqs7\") pod \"aodh-db-create-cwgjz\" (UID: \"e3d074f6-a222-4e33-980c-3031ca1ad334\") " pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.196912 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzbnk\" (UniqueName: \"kubernetes.io/projected/4cebec77-38c2-47dc-8426-6f6802ab516a-kube-api-access-nzbnk\") pod \"nova-scheduler-0\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.196960 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/86ab17b2-cedb-4d37-85b3-1f8c5180495f-logs\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.197026 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-config-data\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.197061 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-config-data\") pod \"nova-scheduler-0\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.197097 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3d074f6-a222-4e33-980c-3031ca1ad334-operator-scripts\") pod \"aodh-db-create-cwgjz\" (UID: \"e3d074f6-a222-4e33-980c-3031ca1ad334\") " pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.197123 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99wpg\" (UniqueName: \"kubernetes.io/projected/86ab17b2-cedb-4d37-85b3-1f8c5180495f-kube-api-access-99wpg\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.197188 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.198586 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3d074f6-a222-4e33-980c-3031ca1ad334-operator-scripts\") pod \"aodh-db-create-cwgjz\" (UID: \"e3d074f6-a222-4e33-980c-3031ca1ad334\") " pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.206321 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-ccnk9"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.208708 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.220856 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.223353 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.225473 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.229788 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klqs7\" (UniqueName: \"kubernetes.io/projected/e3d074f6-a222-4e33-980c-3031ca1ad334-kube-api-access-klqs7\") pod \"aodh-db-create-cwgjz\" (UID: \"e3d074f6-a222-4e33-980c-3031ca1ad334\") " pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.236455 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.300394 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-swift-storage-0\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.300774 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-config\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.300862 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv5pr\" (UniqueName: \"kubernetes.io/projected/b4333c8e-1218-41bd-9323-dbd9372366fd-kube-api-access-kv5pr\") pod \"nova-cell1-novncproxy-0\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.300906 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.300954 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-nb\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.300994 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzbnk\" (UniqueName: \"kubernetes.io/projected/4cebec77-38c2-47dc-8426-6f6802ab516a-kube-api-access-nzbnk\") pod \"nova-scheduler-0\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.301065 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86ab17b2-cedb-4d37-85b3-1f8c5180495f-logs\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 
18:56:28.301104 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.301138 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-svc\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.301210 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.301258 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-config-data\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.301300 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-config-data\") pod \"nova-scheduler-0\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.301326 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-sb\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.301365 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfwd2\" (UniqueName: \"kubernetes.io/projected/76a99b6f-1afc-4b35-a462-809d0f160048-kube-api-access-bfwd2\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.301412 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99wpg\" (UniqueName: \"kubernetes.io/projected/86ab17b2-cedb-4d37-85b3-1f8c5180495f-kube-api-access-99wpg\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.301470 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.305006 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86ab17b2-cedb-4d37-85b3-1f8c5180495f-logs\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.309798 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-a212-account-create-update-54lf7"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.312124 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.314324 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.326201 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-ccnk9"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.331257 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.331575 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzbnk\" (UniqueName: \"kubernetes.io/projected/4cebec77-38c2-47dc-8426-6f6802ab516a-kube-api-access-nzbnk\") pod \"nova-scheduler-0\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.334448 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.340064 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-config-data\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.344580 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-config-data\") pod \"nova-scheduler-0\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.352584 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99wpg\" (UniqueName: \"kubernetes.io/projected/86ab17b2-cedb-4d37-85b3-1f8c5180495f-kube-api-access-99wpg\") pod \"nova-metadata-0\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.371836 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.391211 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-a212-account-create-update-54lf7"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.409612 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.409714 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-svc\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.409776 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.409856 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-sb\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.409891 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfwd2\" (UniqueName: \"kubernetes.io/projected/76a99b6f-1afc-4b35-a462-809d0f160048-kube-api-access-bfwd2\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.410000 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-swift-storage-0\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.410047 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-config\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.410105 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv5pr\" (UniqueName: \"kubernetes.io/projected/b4333c8e-1218-41bd-9323-dbd9372366fd-kube-api-access-kv5pr\") pod \"nova-cell1-novncproxy-0\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.410187 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-nb\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.411760 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-nb\") pod 
\"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.414215 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-sb\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.414942 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-config\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.416761 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.424792 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-swift-storage-0\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.425028 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.427556 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-svc\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.450454 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv5pr\" (UniqueName: \"kubernetes.io/projected/b4333c8e-1218-41bd-9323-dbd9372366fd-kube-api-access-kv5pr\") pod \"nova-cell1-novncproxy-0\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.450898 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfwd2\" (UniqueName: \"kubernetes.io/projected/76a99b6f-1afc-4b35-a462-809d0f160048-kube-api-access-bfwd2\") pod \"dnsmasq-dns-568d7fd7cf-ccnk9\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.459074 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.460002 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.481637 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.500808 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.505541 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.514518 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763761ce-4c4b-4235-a069-8a5161e5b099-operator-scripts\") pod \"aodh-a212-account-create-update-54lf7\" (UID: \"763761ce-4c4b-4235-a069-8a5161e5b099\") " pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.514917 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8d4f\" (UniqueName: \"kubernetes.io/projected/763761ce-4c4b-4235-a069-8a5161e5b099-kube-api-access-b8d4f\") pod \"aodh-a212-account-create-update-54lf7\" (UID: \"763761ce-4c4b-4235-a069-8a5161e5b099\") " pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.622381 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763761ce-4c4b-4235-a069-8a5161e5b099-operator-scripts\") pod \"aodh-a212-account-create-update-54lf7\" (UID: \"763761ce-4c4b-4235-a069-8a5161e5b099\") " pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.622788 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8d4f\" (UniqueName: \"kubernetes.io/projected/763761ce-4c4b-4235-a069-8a5161e5b099-kube-api-access-b8d4f\") pod \"aodh-a212-account-create-update-54lf7\" (UID: \"763761ce-4c4b-4235-a069-8a5161e5b099\") " pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.625132 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763761ce-4c4b-4235-a069-8a5161e5b099-operator-scripts\") pod \"aodh-a212-account-create-update-54lf7\" (UID: \"763761ce-4c4b-4235-a069-8a5161e5b099\") " pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.656795 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8d4f\" (UniqueName: \"kubernetes.io/projected/763761ce-4c4b-4235-a069-8a5161e5b099-kube-api-access-b8d4f\") pod \"aodh-a212-account-create-update-54lf7\" (UID: \"763761ce-4c4b-4235-a069-8a5161e5b099\") " pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.723419 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-hfpj6"] Dec 01 18:56:28 crc kubenswrapper[4935]: I1201 18:56:28.837051 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.042567 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.473957 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerStarted","Data":"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885"} Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.475990 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hfpj6" event={"ID":"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1","Type":"ContainerStarted","Data":"f82df44411d1819120b7b4916c8618b2d42060c04437159f888a29e943a92d2c"} Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.476031 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hfpj6" event={"ID":"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1","Type":"ContainerStarted","Data":"9e74066a6643df6edbb0be8a1d2cfec433e4dceb9aa24f2ac48e0bf398ba788b"} Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.482980 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2a213c6-43ad-4879-bef7-a2ac48ecd73c","Type":"ContainerStarted","Data":"21e52d3a2005fcbda68c0c527f88e682adf28ab92882c97a9456bb8d5e450b7d"} Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.540214 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-ccnk9"] Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.543917 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-hfpj6" podStartSLOduration=2.543893938 podStartE2EDuration="2.543893938s" podCreationTimestamp="2025-12-01 18:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:29.516492365 +0000 UTC m=+1603.538121614" watchObservedRunningTime="2025-12-01 18:56:29.543893938 +0000 UTC m=+1603.565523197" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.569497 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xxfm9"] Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.571368 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.575097 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.575314 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.584772 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xxfm9"] Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.691495 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.691668 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brv6c\" (UniqueName: \"kubernetes.io/projected/035b262e-2127-4878-b0c5-fe6374f824a8-kube-api-access-brv6c\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.691735 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-scripts\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.691755 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-config-data\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.805336 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-scripts\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.805674 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-config-data\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.805778 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.806206 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-brv6c\" (UniqueName: \"kubernetes.io/projected/035b262e-2127-4878-b0c5-fe6374f824a8-kube-api-access-brv6c\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.813496 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-config-data\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.815327 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-scripts\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.826042 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.833739 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brv6c\" (UniqueName: \"kubernetes.io/projected/035b262e-2127-4878-b0c5-fe6374f824a8-kube-api-access-brv6c\") pod \"nova-cell1-conductor-db-sync-xxfm9\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.903681 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.911515 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.925777 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-cwgjz"] Dec 01 18:56:29 crc kubenswrapper[4935]: I1201 18:56:29.936756 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.031094 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.074071 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-a212-account-create-update-54lf7"] Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.493056 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xxfm9"] Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.503435 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4cebec77-38c2-47dc-8426-6f6802ab516a","Type":"ContainerStarted","Data":"7fdd4883fc608061b58725001e405addba1577c112928c9c45894299eb50528c"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.504931 4935 generic.go:334] "Generic (PLEG): container finished" podID="76a99b6f-1afc-4b35-a462-809d0f160048" containerID="6aecf518c7dd19763d459504f1b74467c4699e556558f4370ab788026247d664" exitCode=0 Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.504971 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" event={"ID":"76a99b6f-1afc-4b35-a462-809d0f160048","Type":"ContainerDied","Data":"6aecf518c7dd19763d459504f1b74467c4699e556558f4370ab788026247d664"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.504985 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" event={"ID":"76a99b6f-1afc-4b35-a462-809d0f160048","Type":"ContainerStarted","Data":"a24ba9af04f67956374c8a5c962b446d2dcbb53d8844dc0985da68b9cd2de619"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.563947 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-a212-account-create-update-54lf7" event={"ID":"763761ce-4c4b-4235-a069-8a5161e5b099","Type":"ContainerStarted","Data":"42565f5879b10e2cf817587d8b53a93c570f58e300709af98b2d483a77b817e3"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.563984 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-a212-account-create-update-54lf7" event={"ID":"763761ce-4c4b-4235-a069-8a5161e5b099","Type":"ContainerStarted","Data":"8a768f4912b3038651e25ee3302e3650d81cdef7271e004ca827056c01a66517"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.563996 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"86ab17b2-cedb-4d37-85b3-1f8c5180495f","Type":"ContainerStarted","Data":"63cf572b36075d7f5a407b0a530a6472235e1e2a38a72d0b869b03cc4c8b94e3"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.568821 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-cwgjz" event={"ID":"e3d074f6-a222-4e33-980c-3031ca1ad334","Type":"ContainerStarted","Data":"a3c467cfcf687b268526e6c2e5cea47d102b19abdc4539874d751d5131dd8762"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.568871 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-cwgjz" event={"ID":"e3d074f6-a222-4e33-980c-3031ca1ad334","Type":"ContainerStarted","Data":"ede519d0bd3445f39600a83e8c1d3ef7bb8e429ba9f691508cde8dcf48d4e0ab"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.575388 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-a212-account-create-update-54lf7" podStartSLOduration=3.575367901 podStartE2EDuration="3.575367901s" podCreationTimestamp="2025-12-01 18:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:30.552942125 +0000 UTC m=+1604.574571384" watchObservedRunningTime="2025-12-01 18:56:30.575367901 +0000 UTC m=+1604.596997160" Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.613653 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-create-cwgjz" podStartSLOduration=3.613632276 podStartE2EDuration="3.613632276s" podCreationTimestamp="2025-12-01 18:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:30.598708546 +0000 UTC m=+1604.620337805" watchObservedRunningTime="2025-12-01 18:56:30.613632276 +0000 UTC m=+1604.635261535" Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.635858 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerStarted","Data":"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.636137 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerStarted","Data":"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79"} Dec 01 18:56:30 crc kubenswrapper[4935]: I1201 18:56:30.646014 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b4333c8e-1218-41bd-9323-dbd9372366fd","Type":"ContainerStarted","Data":"fca8711b756674966a659fc48daf7524ffffc90f661c4451b08b3aff2eb52b47"} Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.667986 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xxfm9" event={"ID":"035b262e-2127-4878-b0c5-fe6374f824a8","Type":"ContainerStarted","Data":"1ee441b479e2a8d7aa95eb618469716ef34f1f1ce33464ca2a21a909519d9708"} Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.668264 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xxfm9" event={"ID":"035b262e-2127-4878-b0c5-fe6374f824a8","Type":"ContainerStarted","Data":"a96378bd183c5d7fbe160508d5e94007d7c3a57aeb616692cf3d4e4b6af37ca8"} Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.671075 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" event={"ID":"76a99b6f-1afc-4b35-a462-809d0f160048","Type":"ContainerStarted","Data":"0c76380debe78cfa5abaa1ddf4ca11c8fd89de56056b3d73bdf052acdf244d3e"} Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.671641 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.677699 4935 generic.go:334] "Generic (PLEG): container finished" 
podID="763761ce-4c4b-4235-a069-8a5161e5b099" containerID="42565f5879b10e2cf817587d8b53a93c570f58e300709af98b2d483a77b817e3" exitCode=0 Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.677755 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-a212-account-create-update-54lf7" event={"ID":"763761ce-4c4b-4235-a069-8a5161e5b099","Type":"ContainerDied","Data":"42565f5879b10e2cf817587d8b53a93c570f58e300709af98b2d483a77b817e3"} Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.680925 4935 generic.go:334] "Generic (PLEG): container finished" podID="e3d074f6-a222-4e33-980c-3031ca1ad334" containerID="a3c467cfcf687b268526e6c2e5cea47d102b19abdc4539874d751d5131dd8762" exitCode=0 Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.680962 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-cwgjz" event={"ID":"e3d074f6-a222-4e33-980c-3031ca1ad334","Type":"ContainerDied","Data":"a3c467cfcf687b268526e6c2e5cea47d102b19abdc4539874d751d5131dd8762"} Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.688856 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-xxfm9" podStartSLOduration=2.6888394570000003 podStartE2EDuration="2.688839457s" podCreationTimestamp="2025-12-01 18:56:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:31.68800157 +0000 UTC m=+1605.709630829" watchObservedRunningTime="2025-12-01 18:56:31.688839457 +0000 UTC m=+1605.710468716" Dec 01 18:56:31 crc kubenswrapper[4935]: I1201 18:56:31.738532 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" podStartSLOduration=4.738517121 podStartE2EDuration="4.738517121s" podCreationTimestamp="2025-12-01 18:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:31.73780472 +0000 UTC m=+1605.759433979" watchObservedRunningTime="2025-12-01 18:56:31.738517121 +0000 UTC m=+1605.760146380" Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796460 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b8ef570_0e9e_426b_a8dc_dadd94b78be3.slice/crio-conmon-9b45cf6363caf1545d66a95580099b8f744adba96b4dad41d42b6c531c5b9bcd.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b8ef570_0e9e_426b_a8dc_dadd94b78be3.slice/crio-conmon-9b45cf6363caf1545d66a95580099b8f744adba96b4dad41d42b6c531c5b9bcd.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796554 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-conmon-801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-conmon-801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796577 4935 watcher.go:93] Error while processing event 
("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b8ef570_0e9e_426b_a8dc_dadd94b78be3.slice/crio-9b45cf6363caf1545d66a95580099b8f744adba96b4dad41d42b6c531c5b9bcd.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b8ef570_0e9e_426b_a8dc_dadd94b78be3.slice/crio-9b45cf6363caf1545d66a95580099b8f744adba96b4dad41d42b6c531c5b9bcd.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796593 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-801817a5e0314e7b2d11bd4e19e67ca9f9769ecc9f776f74058bee45e354ce52.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796643 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-conmon-f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-conmon-f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796657 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-f670cce07f07a7138c3f6015d3ed256b0b90aecd2fb7285077bdf1ab2a8f7cba.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796673 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-conmon-06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-conmon-06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796688 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-06873815dc72259e378110282aa56d683b2065648bcf01a282053d55b10b2964.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796702 4935 watcher.go:93] 
Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-conmon-3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-conmon-3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796717 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode06bff0e_d3dc_42fe_a6f0_83d08a36fad2.slice/crio-3b7f598086427fd2d6dbf0320b7edb0a94d5c4920e81a48dab5d7183469db4a2.scope: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796741 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5466cd9c_f886_4bb7_83c0_2c7a24e339f5.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5466cd9c_f886_4bb7_83c0_2c7a24e339f5.slice: no such file or directory Dec 01 18:56:31 crc kubenswrapper[4935]: W1201 18:56:31.796788 4935 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf37e365_636d_43ea_8479_e5e5fdebe49b.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf37e365_636d_43ea_8479_e5e5fdebe49b.slice: no such file or directory Dec 01 18:56:32 crc kubenswrapper[4935]: I1201 18:56:32.240837 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:32 crc kubenswrapper[4935]: I1201 18:56:32.261640 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 18:56:32 crc kubenswrapper[4935]: E1201 18:56:32.379875 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-conmon-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750735d4_ef3a_4fad_b258_13bd36897efa.slice/crio-681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-conmon-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3f8d3f9bedddc8acfe9eb8d773b6a3255b63dda484b3ee8f4f1ab8554957feee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-73f7ac0269667294b97f2112dcc3b20b5329a96fad806ec0981852e419f2b7c4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:56:32 crc kubenswrapper[4935]: E1201 18:56:32.379970 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-conmon-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-73f7ac0269667294b97f2112dcc3b20b5329a96fad806ec0981852e419f2b7c4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3f8d3f9bedddc8acfe9eb8d773b6a3255b63dda484b3ee8f4f1ab8554957feee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750735d4_ef3a_4fad_b258_13bd36897efa.slice/crio-conmon-681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3394c6e9_bf83_4326_885f_484ebbce1d4d.slice/crio-2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750735d4_ef3a_4fad_b258_13bd36897efa.slice/crio-681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-conmon-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:56:32 crc kubenswrapper[4935]: E1201 18:56:32.382601 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-conmon-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-73f7ac0269667294b97f2112dcc3b20b5329a96fad806ec0981852e419f2b7c4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-conmon-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3f8d3f9bedddc8acfe9eb8d773b6a3255b63dda484b3ee8f4f1ab8554957feee\": RecentStats: unable to find data in memory cache]" Dec 01 18:56:32 crc kubenswrapper[4935]: E1201 18:56:32.387535 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-73f7ac0269667294b97f2112dcc3b20b5329a96fad806ec0981852e419f2b7c4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750735d4_ef3a_4fad_b258_13bd36897efa.slice/crio-681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-conmon-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-conmon-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3394c6e9_bf83_4326_885f_484ebbce1d4d.slice/crio-2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750735d4_ef3a_4fad_b258_13bd36897efa.slice/crio-conmon-681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3f8d3f9bedddc8acfe9eb8d773b6a3255b63dda484b3ee8f4f1ab8554957feee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:56:32 crc kubenswrapper[4935]: E1201 18:56:32.395899 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-conmon-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-73f7ac0269667294b97f2112dcc3b20b5329a96fad806ec0981852e419f2b7c4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ed42c4_6eca_40bb_8eb4_0f9c2b7d1522.slice/crio-d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3394c6e9_bf83_4326_885f_484ebbce1d4d.slice/crio-2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750735d4_ef3a_4fad_b258_13bd36897efa.slice/crio-conmon-681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-eaca2f3fed917504349c5b762ccbdf301b64607779add3e162cf614dc1fc2807.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67884ebb_46bf_417e_b499_776f74720d64.slice/crio-conmon-0812c71756d5279e8f91ccc94c6f2d42d3fd8c9c84e525c3e653c670936e08f8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-4caac1c39165ea1bf473188a49fb5b017b28861ec9c954b98d805e8df5f10b18.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3d234b34800a18a8b1a08838935729ff7be43cc8c842b64fd3252ef67df6a1d9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-3f8d3f9bedddc8acfe9eb8d773b6a3255b63dda484b3ee8f4f1ab8554957feee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-conmon-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc773f2fd_7296_4d82_8a48_ed7acefcaac0.slice/crio-5f014b566ddccf98af851249ba5fe3ebc540b516406d41cd063658e465aab51a.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:56:32 crc kubenswrapper[4935]: E1201 18:56:32.419062 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750735d4_ef3a_4fad_b258_13bd36897efa.slice/crio-681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3394c6e9_bf83_4326_885f_484ebbce1d4d.slice/crio-2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod750735d4_ef3a_4fad_b258_13bd36897efa.slice/crio-conmon-681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3394c6e9_bf83_4326_885f_484ebbce1d4d.slice/crio-conmon-2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:56:32 crc kubenswrapper[4935]: I1201 18:56:32.739921 4935 generic.go:334] "Generic (PLEG): container finished" podID="3394c6e9-bf83-4326-885f-484ebbce1d4d" containerID="2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4" exitCode=137 Dec 01 18:56:32 crc kubenswrapper[4935]: I1201 18:56:32.740341 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-75b65c59cf-g7mdb" event={"ID":"3394c6e9-bf83-4326-885f-484ebbce1d4d","Type":"ContainerDied","Data":"2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4"} Dec 01 18:56:32 crc kubenswrapper[4935]: I1201 18:56:32.746210 4935 generic.go:334] "Generic (PLEG): container finished" podID="750735d4-ef3a-4fad-b258-13bd36897efa" containerID="681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5" exitCode=137 Dec 01 18:56:32 crc kubenswrapper[4935]: I1201 18:56:32.747512 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-5dc988865-sv2ln" event={"ID":"750735d4-ef3a-4fad-b258-13bd36897efa","Type":"ContainerDied","Data":"681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5"} Dec 01 18:56:33 crc kubenswrapper[4935]: I1201 18:56:33.950002 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:33 crc kubenswrapper[4935]: I1201 18:56:33.966744 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:56:33 crc kubenswrapper[4935]: I1201 18:56:33.975557 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:56:33 crc kubenswrapper[4935]: I1201 18:56:33.976532 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.013943 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8d4f\" (UniqueName: \"kubernetes.io/projected/763761ce-4c4b-4235-a069-8a5161e5b099-kube-api-access-b8d4f\") pod \"763761ce-4c4b-4235-a069-8a5161e5b099\" (UID: \"763761ce-4c4b-4235-a069-8a5161e5b099\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.014094 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763761ce-4c4b-4235-a069-8a5161e5b099-operator-scripts\") pod \"763761ce-4c4b-4235-a069-8a5161e5b099\" (UID: \"763761ce-4c4b-4235-a069-8a5161e5b099\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.015328 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/763761ce-4c4b-4235-a069-8a5161e5b099-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "763761ce-4c4b-4235-a069-8a5161e5b099" (UID: "763761ce-4c4b-4235-a069-8a5161e5b099"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.029947 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/763761ce-4c4b-4235-a069-8a5161e5b099-kube-api-access-b8d4f" (OuterVolumeSpecName: "kube-api-access-b8d4f") pod "763761ce-4c4b-4235-a069-8a5161e5b099" (UID: "763761ce-4c4b-4235-a069-8a5161e5b099"). InnerVolumeSpecName "kube-api-access-b8d4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.121209 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klqs7\" (UniqueName: \"kubernetes.io/projected/e3d074f6-a222-4e33-980c-3031ca1ad334-kube-api-access-klqs7\") pod \"e3d074f6-a222-4e33-980c-3031ca1ad334\" (UID: \"e3d074f6-a222-4e33-980c-3031ca1ad334\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.121256 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data-custom\") pod \"750735d4-ef3a-4fad-b258-13bd36897efa\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.121323 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data\") pod \"3394c6e9-bf83-4326-885f-484ebbce1d4d\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.121352 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-combined-ca-bundle\") pod \"750735d4-ef3a-4fad-b258-13bd36897efa\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.121516 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbbtr\" (UniqueName: \"kubernetes.io/projected/3394c6e9-bf83-4326-885f-484ebbce1d4d-kube-api-access-dbbtr\") pod \"3394c6e9-bf83-4326-885f-484ebbce1d4d\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 
18:56:34.121590 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-combined-ca-bundle\") pod \"3394c6e9-bf83-4326-885f-484ebbce1d4d\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.121638 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdddr\" (UniqueName: \"kubernetes.io/projected/750735d4-ef3a-4fad-b258-13bd36897efa-kube-api-access-rdddr\") pod \"750735d4-ef3a-4fad-b258-13bd36897efa\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.122544 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data\") pod \"750735d4-ef3a-4fad-b258-13bd36897efa\" (UID: \"750735d4-ef3a-4fad-b258-13bd36897efa\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.122620 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data-custom\") pod \"3394c6e9-bf83-4326-885f-484ebbce1d4d\" (UID: \"3394c6e9-bf83-4326-885f-484ebbce1d4d\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.122656 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3d074f6-a222-4e33-980c-3031ca1ad334-operator-scripts\") pod \"e3d074f6-a222-4e33-980c-3031ca1ad334\" (UID: \"e3d074f6-a222-4e33-980c-3031ca1ad334\") " Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.123350 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3d074f6-a222-4e33-980c-3031ca1ad334-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e3d074f6-a222-4e33-980c-3031ca1ad334" (UID: "e3d074f6-a222-4e33-980c-3031ca1ad334"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.124875 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8d4f\" (UniqueName: \"kubernetes.io/projected/763761ce-4c4b-4235-a069-8a5161e5b099-kube-api-access-b8d4f\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.124895 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3d074f6-a222-4e33-980c-3031ca1ad334-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.125613 4935 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763761ce-4c4b-4235-a069-8a5161e5b099-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.133850 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3394c6e9-bf83-4326-885f-484ebbce1d4d-kube-api-access-dbbtr" (OuterVolumeSpecName: "kube-api-access-dbbtr") pod "3394c6e9-bf83-4326-885f-484ebbce1d4d" (UID: "3394c6e9-bf83-4326-885f-484ebbce1d4d"). InnerVolumeSpecName "kube-api-access-dbbtr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.133926 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/750735d4-ef3a-4fad-b258-13bd36897efa-kube-api-access-rdddr" (OuterVolumeSpecName: "kube-api-access-rdddr") pod "750735d4-ef3a-4fad-b258-13bd36897efa" (UID: "750735d4-ef3a-4fad-b258-13bd36897efa"). InnerVolumeSpecName "kube-api-access-rdddr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.134114 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3394c6e9-bf83-4326-885f-484ebbce1d4d" (UID: "3394c6e9-bf83-4326-885f-484ebbce1d4d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.134186 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3d074f6-a222-4e33-980c-3031ca1ad334-kube-api-access-klqs7" (OuterVolumeSpecName: "kube-api-access-klqs7") pod "e3d074f6-a222-4e33-980c-3031ca1ad334" (UID: "e3d074f6-a222-4e33-980c-3031ca1ad334"). InnerVolumeSpecName "kube-api-access-klqs7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.141811 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "750735d4-ef3a-4fad-b258-13bd36897efa" (UID: "750735d4-ef3a-4fad-b258-13bd36897efa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.167247 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "750735d4-ef3a-4fad-b258-13bd36897efa" (UID: "750735d4-ef3a-4fad-b258-13bd36897efa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.227283 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.227309 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbbtr\" (UniqueName: \"kubernetes.io/projected/3394c6e9-bf83-4326-885f-484ebbce1d4d-kube-api-access-dbbtr\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.227320 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdddr\" (UniqueName: \"kubernetes.io/projected/750735d4-ef3a-4fad-b258-13bd36897efa-kube-api-access-rdddr\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.227330 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.227339 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klqs7\" (UniqueName: \"kubernetes.io/projected/e3d074f6-a222-4e33-980c-3031ca1ad334-kube-api-access-klqs7\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.227348 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.233783 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data" (OuterVolumeSpecName: "config-data") pod "3394c6e9-bf83-4326-885f-484ebbce1d4d" (UID: "3394c6e9-bf83-4326-885f-484ebbce1d4d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.247373 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data" (OuterVolumeSpecName: "config-data") pod "750735d4-ef3a-4fad-b258-13bd36897efa" (UID: "750735d4-ef3a-4fad-b258-13bd36897efa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.260956 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3394c6e9-bf83-4326-885f-484ebbce1d4d" (UID: "3394c6e9-bf83-4326-885f-484ebbce1d4d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.329829 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.329861 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/750735d4-ef3a-4fad-b258-13bd36897efa-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.329870 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3394c6e9-bf83-4326-885f-484ebbce1d4d-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.792709 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-75b65c59cf-g7mdb" event={"ID":"3394c6e9-bf83-4326-885f-484ebbce1d4d","Type":"ContainerDied","Data":"95cfa1e065a8e72ed565bdf8088540fa6e2d9810f9cc34ef42be10464ffa467b"} Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.792767 4935 scope.go:117] "RemoveContainer" containerID="2ef47ee36414ec0bce100005fe7209985f30e498887f4f4b8a22c0eb762abfc4" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.792982 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-75b65c59cf-g7mdb" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.799447 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-a212-account-create-update-54lf7" event={"ID":"763761ce-4c4b-4235-a069-8a5161e5b099","Type":"ContainerDied","Data":"8a768f4912b3038651e25ee3302e3650d81cdef7271e004ca827056c01a66517"} Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.799487 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a768f4912b3038651e25ee3302e3650d81cdef7271e004ca827056c01a66517" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.799581 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-a212-account-create-update-54lf7" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.807706 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-5dc988865-sv2ln" event={"ID":"750735d4-ef3a-4fad-b258-13bd36897efa","Type":"ContainerDied","Data":"fe9e836fcbb6203998c7a679f458959bedf2591c74ee79b5cf392a8f43d56bf4"} Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.807907 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-5dc988865-sv2ln" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.820021 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-cwgjz" event={"ID":"e3d074f6-a222-4e33-980c-3031ca1ad334","Type":"ContainerDied","Data":"ede519d0bd3445f39600a83e8c1d3ef7bb8e429ba9f691508cde8dcf48d4e0ab"} Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.820239 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ede519d0bd3445f39600a83e8c1d3ef7bb8e429ba9f691508cde8dcf48d4e0ab" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.820388 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-cwgjz" Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.894813 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-75b65c59cf-g7mdb"] Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.936810 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-75b65c59cf-g7mdb"] Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.956198 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-5dc988865-sv2ln"] Dec 01 18:56:34 crc kubenswrapper[4935]: I1201 18:56:34.973784 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-5dc988865-sv2ln"] Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.353742 4935 scope.go:117] "RemoveContainer" containerID="681771bf3ab9c27c236f6a6142a1f2d472909dc1b0cb3210a71554340d8225e5" Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.858558 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b4333c8e-1218-41bd-9323-dbd9372366fd","Type":"ContainerStarted","Data":"31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad"} Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.858704 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="b4333c8e-1218-41bd-9323-dbd9372366fd" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad" gracePeriod=30 Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.861181 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4cebec77-38c2-47dc-8426-6f6802ab516a","Type":"ContainerStarted","Data":"201287f5050595e1ade9cc11a16d59788a14bd2f4e2e23e6ada642434b9a2fd8"} Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.876484 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"86ab17b2-cedb-4d37-85b3-1f8c5180495f","Type":"ContainerStarted","Data":"e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c"} Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.884343 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.435588006 podStartE2EDuration="8.884323339s" podCreationTimestamp="2025-12-01 18:56:27 +0000 UTC" firstStartedPulling="2025-12-01 18:56:29.957586059 +0000 UTC m=+1603.979215318" lastFinishedPulling="2025-12-01 18:56:35.406321392 +0000 UTC m=+1609.427950651" observedRunningTime="2025-12-01 18:56:35.878113695 +0000 UTC m=+1609.899742954" watchObservedRunningTime="2025-12-01 18:56:35.884323339 +0000 UTC m=+1609.905952598" Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.886735 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2a213c6-43ad-4879-bef7-a2ac48ecd73c","Type":"ContainerStarted","Data":"10e594f97af0fa8774d501c90c7705fdec6b29c3c079f11790026b9f332a5a06"} Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.907357 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.580845573 podStartE2EDuration="8.907336585s" podCreationTimestamp="2025-12-01 18:56:27 +0000 UTC" firstStartedPulling="2025-12-01 18:56:30.076299769 +0000 UTC m=+1604.097929028" lastFinishedPulling="2025-12-01 18:56:35.402790781 +0000 UTC m=+1609.424420040" 
observedRunningTime="2025-12-01 18:56:35.896829414 +0000 UTC m=+1609.918458673" watchObservedRunningTime="2025-12-01 18:56:35.907336585 +0000 UTC m=+1609.928965844" Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.910284 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerStarted","Data":"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57"} Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.910436 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="ceilometer-central-agent" containerID="cri-o://657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885" gracePeriod=30 Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.910854 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.911127 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="sg-core" containerID="cri-o://700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171" gracePeriod=30 Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.911201 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="proxy-httpd" containerID="cri-o://2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57" gracePeriod=30 Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.911249 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="ceilometer-notification-agent" containerID="cri-o://574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79" gracePeriod=30 Dec 01 18:56:35 crc kubenswrapper[4935]: I1201 18:56:35.944973 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9034652319999998 podStartE2EDuration="9.94495714s" podCreationTimestamp="2025-12-01 18:56:26 +0000 UTC" firstStartedPulling="2025-12-01 18:56:27.312391982 +0000 UTC m=+1601.334021251" lastFinishedPulling="2025-12-01 18:56:35.3538839 +0000 UTC m=+1609.375513159" observedRunningTime="2025-12-01 18:56:35.942679769 +0000 UTC m=+1609.964309028" watchObservedRunningTime="2025-12-01 18:56:35.94495714 +0000 UTC m=+1609.966586399" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.522967 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3394c6e9-bf83-4326-885f-484ebbce1d4d" path="/var/lib/kubelet/pods/3394c6e9-bf83-4326-885f-484ebbce1d4d/volumes" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.524198 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="750735d4-ef3a-4fad-b258-13bd36897efa" path="/var/lib/kubelet/pods/750735d4-ef3a-4fad-b258-13bd36897efa/volumes" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.726503 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.803424 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gvzl\" (UniqueName: \"kubernetes.io/projected/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-kube-api-access-5gvzl\") pod \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.803666 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-log-httpd\") pod \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.803742 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-sg-core-conf-yaml\") pod \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.803797 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-combined-ca-bundle\") pod \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.803838 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-config-data\") pod \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.803886 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-run-httpd\") pod \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.803921 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-scripts\") pod \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\" (UID: \"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.806819 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" (UID: "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.807078 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" (UID: "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.811117 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-scripts" (OuterVolumeSpecName: "scripts") pod "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" (UID: "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.811227 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-kube-api-access-5gvzl" (OuterVolumeSpecName: "kube-api-access-5gvzl") pod "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" (UID: "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4"). InnerVolumeSpecName "kube-api-access-5gvzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.837643 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" (UID: "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.903737 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" (UID: "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.906670 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gvzl\" (UniqueName: \"kubernetes.io/projected/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-kube-api-access-5gvzl\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.906698 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.906707 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.906716 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.906724 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.906731 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971333 4935 generic.go:334] "Generic (PLEG): container finished" 
podID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerID="2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57" exitCode=0 Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971364 4935 generic.go:334] "Generic (PLEG): container finished" podID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerID="700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171" exitCode=2 Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971372 4935 generic.go:334] "Generic (PLEG): container finished" podID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerID="574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79" exitCode=0 Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971379 4935 generic.go:334] "Generic (PLEG): container finished" podID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerID="657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885" exitCode=0 Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971478 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerDied","Data":"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57"} Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971510 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerDied","Data":"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171"} Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971533 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerDied","Data":"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79"} Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971543 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerDied","Data":"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885"} Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971552 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4","Type":"ContainerDied","Data":"47fd15aa92d87ee06fd247afe04402557848624f9bd471b676f364ce0897c330"} Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.971570 4935 scope.go:117] "RemoveContainer" containerID="2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.972130 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.976188 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"86ab17b2-cedb-4d37-85b3-1f8c5180495f","Type":"ContainerStarted","Data":"a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09"} Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.976329 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerName="nova-metadata-log" containerID="cri-o://e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c" gracePeriod=30 Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.976583 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerName="nova-metadata-metadata" containerID="cri-o://a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09" gracePeriod=30 Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.983729 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-config-data" (OuterVolumeSpecName: "config-data") pod "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" (UID: "66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:36.990416 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2a213c6-43ad-4879-bef7-a2ac48ecd73c","Type":"ContainerStarted","Data":"c18d43334fa83bebdc71aec1608e41e9c4b55b67d0934790c6df7ab8832e1703"} Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.014951 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.019200 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.57563049 podStartE2EDuration="10.019140788s" podCreationTimestamp="2025-12-01 18:56:27 +0000 UTC" firstStartedPulling="2025-12-01 18:56:29.95918516 +0000 UTC m=+1603.980814419" lastFinishedPulling="2025-12-01 18:56:35.402695448 +0000 UTC m=+1609.424324717" observedRunningTime="2025-12-01 18:56:36.998644802 +0000 UTC m=+1611.020274061" watchObservedRunningTime="2025-12-01 18:56:37.019140788 +0000 UTC m=+1611.040770047" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.029516 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.832521141 podStartE2EDuration="10.029500404s" podCreationTimestamp="2025-12-01 18:56:27 +0000 UTC" firstStartedPulling="2025-12-01 18:56:29.205703975 +0000 UTC m=+1603.227333234" lastFinishedPulling="2025-12-01 18:56:35.402683238 +0000 UTC m=+1609.424312497" observedRunningTime="2025-12-01 18:56:37.019595613 +0000 UTC m=+1611.041224882" watchObservedRunningTime="2025-12-01 18:56:37.029500404 +0000 UTC m=+1611.051129663" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.076922 4935 scope.go:117] "RemoveContainer" containerID="700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.098840 4935 scope.go:117] "RemoveContainer" 
containerID="574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.252849 4935 scope.go:117] "RemoveContainer" containerID="657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.285619 4935 scope.go:117] "RemoveContainer" containerID="2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.286630 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57\": container with ID starting with 2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57 not found: ID does not exist" containerID="2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.286708 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57"} err="failed to get container status \"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57\": rpc error: code = NotFound desc = could not find container \"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57\": container with ID starting with 2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.286765 4935 scope.go:117] "RemoveContainer" containerID="700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.287115 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171\": container with ID starting with 700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171 not found: ID does not exist" containerID="700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.287262 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171"} err="failed to get container status \"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171\": rpc error: code = NotFound desc = could not find container \"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171\": container with ID starting with 700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.287283 4935 scope.go:117] "RemoveContainer" containerID="574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.288311 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79\": container with ID starting with 574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79 not found: ID does not exist" containerID="574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.288337 4935 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79"} err="failed to get container status \"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79\": rpc error: code = NotFound desc = could not find container \"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79\": container with ID starting with 574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.288357 4935 scope.go:117] "RemoveContainer" containerID="657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.289491 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885\": container with ID starting with 657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885 not found: ID does not exist" containerID="657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.289519 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885"} err="failed to get container status \"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885\": rpc error: code = NotFound desc = could not find container \"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885\": container with ID starting with 657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.289540 4935 scope.go:117] "RemoveContainer" containerID="2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.291327 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57"} err="failed to get container status \"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57\": rpc error: code = NotFound desc = could not find container \"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57\": container with ID starting with 2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.291348 4935 scope.go:117] "RemoveContainer" containerID="700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.291765 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171"} err="failed to get container status \"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171\": rpc error: code = NotFound desc = could not find container \"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171\": container with ID starting with 700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.291808 4935 scope.go:117] "RemoveContainer" containerID="574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.292432 4935 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79"} err="failed to get container status \"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79\": rpc error: code = NotFound desc = could not find container \"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79\": container with ID starting with 574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.292456 4935 scope.go:117] "RemoveContainer" containerID="657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.292734 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885"} err="failed to get container status \"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885\": rpc error: code = NotFound desc = could not find container \"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885\": container with ID starting with 657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.292765 4935 scope.go:117] "RemoveContainer" containerID="2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.292990 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57"} err="failed to get container status \"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57\": rpc error: code = NotFound desc = could not find container \"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57\": container with ID starting with 2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.293012 4935 scope.go:117] "RemoveContainer" containerID="700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.293314 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171"} err="failed to get container status \"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171\": rpc error: code = NotFound desc = could not find container \"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171\": container with ID starting with 700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.293336 4935 scope.go:117] "RemoveContainer" containerID="574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.293530 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79"} err="failed to get container status \"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79\": rpc error: code = NotFound desc = could not find container \"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79\": container with ID starting with 574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79 not found: ID does not exist" Dec 
01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.293556 4935 scope.go:117] "RemoveContainer" containerID="657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.293733 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885"} err="failed to get container status \"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885\": rpc error: code = NotFound desc = could not find container \"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885\": container with ID starting with 657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.293750 4935 scope.go:117] "RemoveContainer" containerID="2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.294081 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57"} err="failed to get container status \"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57\": rpc error: code = NotFound desc = could not find container \"2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57\": container with ID starting with 2a6ec80d5eb0ae61f576ba3df0d00a43eab30ba258e99c5e0dd13515e71ceb57 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.294098 4935 scope.go:117] "RemoveContainer" containerID="700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.294277 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171"} err="failed to get container status \"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171\": rpc error: code = NotFound desc = could not find container \"700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171\": container with ID starting with 700922d0fc6be055282b66707e011ddbca2b30ab509091f4cdc9deb0a3abd171 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.294295 4935 scope.go:117] "RemoveContainer" containerID="574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.294457 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79"} err="failed to get container status \"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79\": rpc error: code = NotFound desc = could not find container \"574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79\": container with ID starting with 574b06fd74ad3d37882ae86d5f6b8b99a69ebb3695398cb3c26494e6f087bc79 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.294472 4935 scope.go:117] "RemoveContainer" containerID="657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.294628 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885"} err="failed to get container status 
\"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885\": rpc error: code = NotFound desc = could not find container \"657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885\": container with ID starting with 657787331af184beb0f128827f7d8e302394163647bdd664c1f26b06d41f9885 not found: ID does not exist" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.324479 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.345442 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.360487 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.361040 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3d074f6-a222-4e33-980c-3031ca1ad334" containerName="mariadb-database-create" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361052 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3d074f6-a222-4e33-980c-3031ca1ad334" containerName="mariadb-database-create" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.361078 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="ceilometer-central-agent" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361084 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="ceilometer-central-agent" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.361100 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="750735d4-ef3a-4fad-b258-13bd36897efa" containerName="heat-cfnapi" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361105 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="750735d4-ef3a-4fad-b258-13bd36897efa" containerName="heat-cfnapi" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.361118 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="proxy-httpd" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361123 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="proxy-httpd" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.361139 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="ceilometer-notification-agent" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361159 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="ceilometer-notification-agent" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.361174 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="sg-core" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361180 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="sg-core" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.361224 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3394c6e9-bf83-4326-885f-484ebbce1d4d" containerName="heat-api" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361231 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="3394c6e9-bf83-4326-885f-484ebbce1d4d" 
containerName="heat-api" Dec 01 18:56:37 crc kubenswrapper[4935]: E1201 18:56:37.361247 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="763761ce-4c4b-4235-a069-8a5161e5b099" containerName="mariadb-account-create-update" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361253 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="763761ce-4c4b-4235-a069-8a5161e5b099" containerName="mariadb-account-create-update" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361494 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="3394c6e9-bf83-4326-885f-484ebbce1d4d" containerName="heat-api" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361512 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="ceilometer-central-agent" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361524 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3d074f6-a222-4e33-980c-3031ca1ad334" containerName="mariadb-database-create" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361542 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="ceilometer-notification-agent" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361551 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="proxy-httpd" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361563 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="750735d4-ef3a-4fad-b258-13bd36897efa" containerName="heat-cfnapi" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361581 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="763761ce-4c4b-4235-a069-8a5161e5b099" containerName="mariadb-account-create-update" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.361590 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" containerName="sg-core" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.364631 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.367580 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.367607 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.388803 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.530976 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwfpg\" (UniqueName: \"kubernetes.io/projected/53308b1c-fda7-40fe-b113-6784940740d9-kube-api-access-zwfpg\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.531343 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-log-httpd\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.531432 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.531663 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-run-httpd\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.531919 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-scripts\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.531948 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.531973 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-config-data\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.552123 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.634252 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-config-data\") pod \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.634387 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86ab17b2-cedb-4d37-85b3-1f8c5180495f-logs\") pod \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.634834 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99wpg\" (UniqueName: \"kubernetes.io/projected/86ab17b2-cedb-4d37-85b3-1f8c5180495f-kube-api-access-99wpg\") pod \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.634874 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86ab17b2-cedb-4d37-85b3-1f8c5180495f-logs" (OuterVolumeSpecName: "logs") pod "86ab17b2-cedb-4d37-85b3-1f8c5180495f" (UID: "86ab17b2-cedb-4d37-85b3-1f8c5180495f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.634976 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-combined-ca-bundle\") pod \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\" (UID: \"86ab17b2-cedb-4d37-85b3-1f8c5180495f\") " Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.635543 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwfpg\" (UniqueName: \"kubernetes.io/projected/53308b1c-fda7-40fe-b113-6784940740d9-kube-api-access-zwfpg\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.635642 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-log-httpd\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.635803 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.635987 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-run-httpd\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.636211 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.636258 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-scripts\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.636295 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-config-data\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.636401 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86ab17b2-cedb-4d37-85b3-1f8c5180495f-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.638486 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-run-httpd\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.638614 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-log-httpd\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.641653 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86ab17b2-cedb-4d37-85b3-1f8c5180495f-kube-api-access-99wpg" (OuterVolumeSpecName: "kube-api-access-99wpg") pod "86ab17b2-cedb-4d37-85b3-1f8c5180495f" (UID: "86ab17b2-cedb-4d37-85b3-1f8c5180495f"). InnerVolumeSpecName "kube-api-access-99wpg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.645647 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-config-data\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.646596 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.647362 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.649554 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-scripts\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.660108 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwfpg\" (UniqueName: \"kubernetes.io/projected/53308b1c-fda7-40fe-b113-6784940740d9-kube-api-access-zwfpg\") pod \"ceilometer-0\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.689291 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-config-data" (OuterVolumeSpecName: "config-data") pod "86ab17b2-cedb-4d37-85b3-1f8c5180495f" (UID: "86ab17b2-cedb-4d37-85b3-1f8c5180495f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.691132 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86ab17b2-cedb-4d37-85b3-1f8c5180495f" (UID: "86ab17b2-cedb-4d37-85b3-1f8c5180495f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.706873 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.741293 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99wpg\" (UniqueName: \"kubernetes.io/projected/86ab17b2-cedb-4d37-85b3-1f8c5180495f-kube-api-access-99wpg\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.741846 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:37 crc kubenswrapper[4935]: I1201 18:56:37.741861 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86ab17b2-cedb-4d37-85b3-1f8c5180495f-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.004984 4935 generic.go:334] "Generic (PLEG): container finished" podID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerID="a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09" exitCode=0 Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.005510 4935 generic.go:334] "Generic (PLEG): container finished" podID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerID="e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c" exitCode=143 Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.005369 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"86ab17b2-cedb-4d37-85b3-1f8c5180495f","Type":"ContainerDied","Data":"a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09"} Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.005607 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"86ab17b2-cedb-4d37-85b3-1f8c5180495f","Type":"ContainerDied","Data":"e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c"} Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.005619 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"86ab17b2-cedb-4d37-85b3-1f8c5180495f","Type":"ContainerDied","Data":"63cf572b36075d7f5a407b0a530a6472235e1e2a38a72d0b869b03cc4c8b94e3"} Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.005636 4935 scope.go:117] "RemoveContainer" containerID="a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.005447 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.039652 4935 scope.go:117] "RemoveContainer" containerID="e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.065984 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.083594 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.089757 4935 scope.go:117] "RemoveContainer" containerID="a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09" Dec 01 18:56:38 crc kubenswrapper[4935]: E1201 18:56:38.091264 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09\": container with ID starting with a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09 not found: ID does not exist" containerID="a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.091299 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09"} err="failed to get container status \"a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09\": rpc error: code = NotFound desc = could not find container \"a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09\": container with ID starting with a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09 not found: ID does not exist" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.091319 4935 scope.go:117] "RemoveContainer" containerID="e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c" Dec 01 18:56:38 crc kubenswrapper[4935]: E1201 18:56:38.092414 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c\": container with ID starting with e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c not found: ID does not exist" containerID="e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.092432 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c"} err="failed to get container status \"e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c\": rpc error: code = NotFound desc = could not find container \"e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c\": container with ID starting with e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c not found: ID does not exist" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.092445 4935 scope.go:117] "RemoveContainer" containerID="a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.093088 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09"} err="failed to get container status \"a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09\": rpc error: code = NotFound 
desc = could not find container \"a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09\": container with ID starting with a704c220917bfc786a68b6ecba9d70a4eb8710b33c05e16ec72b0935ff3d8e09 not found: ID does not exist" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.093108 4935 scope.go:117] "RemoveContainer" containerID="e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.094219 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c"} err="failed to get container status \"e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c\": rpc error: code = NotFound desc = could not find container \"e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c\": container with ID starting with e815164114468a60370b0fba4021800c0dd2dc58fa67fccdf95f1e5c30f5004c not found: ID does not exist" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.100606 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:38 crc kubenswrapper[4935]: E1201 18:56:38.101158 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerName="nova-metadata-log" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.101173 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerName="nova-metadata-log" Dec 01 18:56:38 crc kubenswrapper[4935]: E1201 18:56:38.101197 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerName="nova-metadata-metadata" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.101204 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerName="nova-metadata-metadata" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.101448 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerName="nova-metadata-log" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.101464 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" containerName="nova-metadata-metadata" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.102701 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.105192 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.105420 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.116307 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.151854 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.151913 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-config-data\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.152040 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-logs\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.152071 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v24mp\" (UniqueName: \"kubernetes.io/projected/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-kube-api-access-v24mp\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.152170 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.238814 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.238855 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.254119 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.254430 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-config-data\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc 
kubenswrapper[4935]: I1201 18:56:38.254483 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-logs\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.254504 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v24mp\" (UniqueName: \"kubernetes.io/projected/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-kube-api-access-v24mp\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.254558 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.255905 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-logs\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.262068 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.276291 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-config-data\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.276689 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v24mp\" (UniqueName: \"kubernetes.io/projected/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-kube-api-access-v24mp\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.306734 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.323657 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.384922 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-8dp9s"] Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.386574 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.389100 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.389273 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.389424 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-l8dhf" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.390699 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.427302 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-8dp9s"] Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.434952 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.462600 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.502485 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.502528 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.506986 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.555477 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4" path="/var/lib/kubelet/pods/66dbc1f2-03a1-4af1-90dd-18c4c5fd08a4/volumes" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.559381 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86ab17b2-cedb-4d37-85b3-1f8c5180495f" path="/var/lib/kubelet/pods/86ab17b2-cedb-4d37-85b3-1f8c5180495f/volumes" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.560219 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-tr78h"] Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.560437 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" podUID="236b38ba-f435-4e71-9777-a15c545ce185" containerName="dnsmasq-dns" containerID="cri-o://a2af2c640063e95aae37a7a579df7b5de4b16fa423d7522b4a0a09f5d754b14a" gracePeriod=10 Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.572658 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-scripts\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.572717 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9tt6\" (UniqueName: \"kubernetes.io/projected/2d9754a9-f6c3-405b-ad06-70e432e7eedc-kube-api-access-n9tt6\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc 
kubenswrapper[4935]: I1201 18:56:38.572744 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-config-data\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.572777 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-combined-ca-bundle\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.584739 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.674979 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-scripts\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.675018 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9tt6\" (UniqueName: \"kubernetes.io/projected/2d9754a9-f6c3-405b-ad06-70e432e7eedc-kube-api-access-n9tt6\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.675048 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-config-data\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.675088 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-combined-ca-bundle\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.684636 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-scripts\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.686027 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-combined-ca-bundle\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.687222 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-config-data\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.701350 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-n9tt6\" (UniqueName: \"kubernetes.io/projected/2d9754a9-f6c3-405b-ad06-70e432e7eedc-kube-api-access-n9tt6\") pod \"aodh-db-sync-8dp9s\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.765964 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:38 crc kubenswrapper[4935]: I1201 18:56:38.840558 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" podUID="236b38ba-f435-4e71-9777-a15c545ce185" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.208:5353: connect: connection refused" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.045299 4935 generic.go:334] "Generic (PLEG): container finished" podID="236b38ba-f435-4e71-9777-a15c545ce185" containerID="a2af2c640063e95aae37a7a579df7b5de4b16fa423d7522b4a0a09f5d754b14a" exitCode=0 Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.045668 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" event={"ID":"236b38ba-f435-4e71-9777-a15c545ce185","Type":"ContainerDied","Data":"a2af2c640063e95aae37a7a579df7b5de4b16fa423d7522b4a0a09f5d754b14a"} Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.092953 4935 generic.go:334] "Generic (PLEG): container finished" podID="035b262e-2127-4878-b0c5-fe6374f824a8" containerID="1ee441b479e2a8d7aa95eb618469716ef34f1f1ce33464ca2a21a909519d9708" exitCode=0 Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.093057 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xxfm9" event={"ID":"035b262e-2127-4878-b0c5-fe6374f824a8","Type":"ContainerDied","Data":"1ee441b479e2a8d7aa95eb618469716ef34f1f1ce33464ca2a21a909519d9708"} Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.119993 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerStarted","Data":"dcd23b863790e5e53e3531c119182d89309cd54a69af499edc2d138246deb1bb"} Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.162427 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.323280 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.232:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.323740 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.232:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.329921 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.349343 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:39 crc kubenswrapper[4935]: W1201 18:56:39.355317 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f03c9a2_be16_4f6c_9585_c4b7b0dbe150.slice/crio-b0f1f0f41aacdd9c892f088b1923b6e5557a7014306a8cb08e9e67942ddd8092 WatchSource:0}: Error finding container b0f1f0f41aacdd9c892f088b1923b6e5557a7014306a8cb08e9e67942ddd8092: Status 404 returned error can't find the container with id b0f1f0f41aacdd9c892f088b1923b6e5557a7014306a8cb08e9e67942ddd8092 Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.500993 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-svc\") pod \"236b38ba-f435-4e71-9777-a15c545ce185\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.501129 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-nb\") pod \"236b38ba-f435-4e71-9777-a15c545ce185\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.501171 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-sb\") pod \"236b38ba-f435-4e71-9777-a15c545ce185\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.501233 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-swift-storage-0\") pod \"236b38ba-f435-4e71-9777-a15c545ce185\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.501270 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9wbk\" (UniqueName: \"kubernetes.io/projected/236b38ba-f435-4e71-9777-a15c545ce185-kube-api-access-d9wbk\") pod \"236b38ba-f435-4e71-9777-a15c545ce185\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.501321 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-config\") pod \"236b38ba-f435-4e71-9777-a15c545ce185\" (UID: \"236b38ba-f435-4e71-9777-a15c545ce185\") " Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.550386 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/236b38ba-f435-4e71-9777-a15c545ce185-kube-api-access-d9wbk" (OuterVolumeSpecName: "kube-api-access-d9wbk") pod "236b38ba-f435-4e71-9777-a15c545ce185" (UID: "236b38ba-f435-4e71-9777-a15c545ce185"). InnerVolumeSpecName "kube-api-access-d9wbk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.559943 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-8dp9s"] Dec 01 18:56:39 crc kubenswrapper[4935]: W1201 18:56:39.587630 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d9754a9_f6c3_405b_ad06_70e432e7eedc.slice/crio-9f411d27838fe809e6c89cc56921304ee13b8d46eadea2ec0c5a2ac253dbb263 WatchSource:0}: Error finding container 9f411d27838fe809e6c89cc56921304ee13b8d46eadea2ec0c5a2ac253dbb263: Status 404 returned error can't find the container with id 9f411d27838fe809e6c89cc56921304ee13b8d46eadea2ec0c5a2ac253dbb263 Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.596356 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "236b38ba-f435-4e71-9777-a15c545ce185" (UID: "236b38ba-f435-4e71-9777-a15c545ce185"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.603714 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.603740 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9wbk\" (UniqueName: \"kubernetes.io/projected/236b38ba-f435-4e71-9777-a15c545ce185-kube-api-access-d9wbk\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.658278 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "236b38ba-f435-4e71-9777-a15c545ce185" (UID: "236b38ba-f435-4e71-9777-a15c545ce185"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.660820 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-config" (OuterVolumeSpecName: "config") pod "236b38ba-f435-4e71-9777-a15c545ce185" (UID: "236b38ba-f435-4e71-9777-a15c545ce185"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.661709 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "236b38ba-f435-4e71-9777-a15c545ce185" (UID: "236b38ba-f435-4e71-9777-a15c545ce185"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.674678 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "236b38ba-f435-4e71-9777-a15c545ce185" (UID: "236b38ba-f435-4e71-9777-a15c545ce185"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.705509 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.705536 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.705546 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:39 crc kubenswrapper[4935]: I1201 18:56:39.705555 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/236b38ba-f435-4e71-9777-a15c545ce185-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.139573 4935 generic.go:334] "Generic (PLEG): container finished" podID="c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1" containerID="f82df44411d1819120b7b4916c8618b2d42060c04437159f888a29e943a92d2c" exitCode=0 Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.139650 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hfpj6" event={"ID":"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1","Type":"ContainerDied","Data":"f82df44411d1819120b7b4916c8618b2d42060c04437159f888a29e943a92d2c"} Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.142279 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-8dp9s" event={"ID":"2d9754a9-f6c3-405b-ad06-70e432e7eedc","Type":"ContainerStarted","Data":"9f411d27838fe809e6c89cc56921304ee13b8d46eadea2ec0c5a2ac253dbb263"} Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.146300 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" event={"ID":"236b38ba-f435-4e71-9777-a15c545ce185","Type":"ContainerDied","Data":"31eac2740294742fd25d51740cd50a97bdda9d90b53aa28b8f3c15c450f9d0a7"} Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.146363 4935 scope.go:117] "RemoveContainer" containerID="a2af2c640063e95aae37a7a579df7b5de4b16fa423d7522b4a0a09f5d754b14a" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.146573 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688b9f5b49-tr78h" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.175779 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150","Type":"ContainerStarted","Data":"0ac698c876e48fd4cf33bfa6919d5ae04cb34157e3039d787094bc8cbcac42c6"} Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.175829 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150","Type":"ContainerStarted","Data":"98f1edcbfa82a426a693078a4a9f591845e9597142e8e3cb4cef4f2822e57abe"} Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.175840 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150","Type":"ContainerStarted","Data":"b0f1f0f41aacdd9c892f088b1923b6e5557a7014306a8cb08e9e67942ddd8092"} Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.179259 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerStarted","Data":"14dc81d1e49832d8e41343ea5639a060f501f4bc6160210bfa069b066a5a043b"} Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.179359 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerStarted","Data":"6f32732461324016276ca8cc47de8a194981e73f53cdb3d72e801103b383658a"} Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.189399 4935 scope.go:117] "RemoveContainer" containerID="0e5e5ba96cc7c7827546a129cda102572667611f48c550b87af8813c5c078496" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.209342 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.209321604 podStartE2EDuration="2.209321604s" podCreationTimestamp="2025-12-01 18:56:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:40.205781502 +0000 UTC m=+1614.227410761" watchObservedRunningTime="2025-12-01 18:56:40.209321604 +0000 UTC m=+1614.230950863" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.347845 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-tr78h"] Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.357538 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688b9f5b49-tr78h"] Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.528473 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="236b38ba-f435-4e71-9777-a15c545ce185" path="/var/lib/kubelet/pods/236b38ba-f435-4e71-9777-a15c545ce185/volumes" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.726554 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.847078 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-combined-ca-bundle\") pod \"035b262e-2127-4878-b0c5-fe6374f824a8\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.847465 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-config-data\") pod \"035b262e-2127-4878-b0c5-fe6374f824a8\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.847543 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-scripts\") pod \"035b262e-2127-4878-b0c5-fe6374f824a8\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.847597 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brv6c\" (UniqueName: \"kubernetes.io/projected/035b262e-2127-4878-b0c5-fe6374f824a8-kube-api-access-brv6c\") pod \"035b262e-2127-4878-b0c5-fe6374f824a8\" (UID: \"035b262e-2127-4878-b0c5-fe6374f824a8\") " Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.864682 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-scripts" (OuterVolumeSpecName: "scripts") pod "035b262e-2127-4878-b0c5-fe6374f824a8" (UID: "035b262e-2127-4878-b0c5-fe6374f824a8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.864835 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/035b262e-2127-4878-b0c5-fe6374f824a8-kube-api-access-brv6c" (OuterVolumeSpecName: "kube-api-access-brv6c") pod "035b262e-2127-4878-b0c5-fe6374f824a8" (UID: "035b262e-2127-4878-b0c5-fe6374f824a8"). InnerVolumeSpecName "kube-api-access-brv6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.887426 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "035b262e-2127-4878-b0c5-fe6374f824a8" (UID: "035b262e-2127-4878-b0c5-fe6374f824a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.891281 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-config-data" (OuterVolumeSpecName: "config-data") pod "035b262e-2127-4878-b0c5-fe6374f824a8" (UID: "035b262e-2127-4878-b0c5-fe6374f824a8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.950439 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.950628 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.950684 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/035b262e-2127-4878-b0c5-fe6374f824a8-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:40 crc kubenswrapper[4935]: I1201 18:56:40.950745 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brv6c\" (UniqueName: \"kubernetes.io/projected/035b262e-2127-4878-b0c5-fe6374f824a8-kube-api-access-brv6c\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.189555 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 18:56:41 crc kubenswrapper[4935]: E1201 18:56:41.190314 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="035b262e-2127-4878-b0c5-fe6374f824a8" containerName="nova-cell1-conductor-db-sync" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.190329 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="035b262e-2127-4878-b0c5-fe6374f824a8" containerName="nova-cell1-conductor-db-sync" Dec 01 18:56:41 crc kubenswrapper[4935]: E1201 18:56:41.190374 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="236b38ba-f435-4e71-9777-a15c545ce185" containerName="init" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.190380 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="236b38ba-f435-4e71-9777-a15c545ce185" containerName="init" Dec 01 18:56:41 crc kubenswrapper[4935]: E1201 18:56:41.190394 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="236b38ba-f435-4e71-9777-a15c545ce185" containerName="dnsmasq-dns" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.190400 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="236b38ba-f435-4e71-9777-a15c545ce185" containerName="dnsmasq-dns" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.190622 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="035b262e-2127-4878-b0c5-fe6374f824a8" containerName="nova-cell1-conductor-db-sync" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.190648 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="236b38ba-f435-4e71-9777-a15c545ce185" containerName="dnsmasq-dns" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.191469 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.202686 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.231334 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xxfm9" event={"ID":"035b262e-2127-4878-b0c5-fe6374f824a8","Type":"ContainerDied","Data":"a96378bd183c5d7fbe160508d5e94007d7c3a57aeb616692cf3d4e4b6af37ca8"} Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.231370 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a96378bd183c5d7fbe160508d5e94007d7c3a57aeb616692cf3d4e4b6af37ca8" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.231426 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xxfm9" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.259622 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerStarted","Data":"b99a8d17e1142106c6866ae551f731420401a8ee4745bf7aab7e51291d057920"} Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.358799 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knns2\" (UniqueName: \"kubernetes.io/projected/9af914fc-37f4-4830-9b72-aadeba875772-kube-api-access-knns2\") pod \"nova-cell1-conductor-0\" (UID: \"9af914fc-37f4-4830-9b72-aadeba875772\") " pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.358888 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9af914fc-37f4-4830-9b72-aadeba875772-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9af914fc-37f4-4830-9b72-aadeba875772\") " pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.358976 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9af914fc-37f4-4830-9b72-aadeba875772-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9af914fc-37f4-4830-9b72-aadeba875772\") " pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.461689 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knns2\" (UniqueName: \"kubernetes.io/projected/9af914fc-37f4-4830-9b72-aadeba875772-kube-api-access-knns2\") pod \"nova-cell1-conductor-0\" (UID: \"9af914fc-37f4-4830-9b72-aadeba875772\") " pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.461760 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9af914fc-37f4-4830-9b72-aadeba875772-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9af914fc-37f4-4830-9b72-aadeba875772\") " pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.461815 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9af914fc-37f4-4830-9b72-aadeba875772-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9af914fc-37f4-4830-9b72-aadeba875772\") " 
pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.471712 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9af914fc-37f4-4830-9b72-aadeba875772-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9af914fc-37f4-4830-9b72-aadeba875772\") " pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.474515 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9af914fc-37f4-4830-9b72-aadeba875772-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9af914fc-37f4-4830-9b72-aadeba875772\") " pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.484288 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knns2\" (UniqueName: \"kubernetes.io/projected/9af914fc-37f4-4830-9b72-aadeba875772-kube-api-access-knns2\") pod \"nova-cell1-conductor-0\" (UID: \"9af914fc-37f4-4830-9b72-aadeba875772\") " pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.509192 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:56:41 crc kubenswrapper[4935]: E1201 18:56:41.509614 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.522278 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.628768 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.769079 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-config-data\") pod \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.769117 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q66k2\" (UniqueName: \"kubernetes.io/projected/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-kube-api-access-q66k2\") pod \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.769211 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-scripts\") pod \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.769273 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-combined-ca-bundle\") pod \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\" (UID: \"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1\") " Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.782614 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-scripts" (OuterVolumeSpecName: "scripts") pod "c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1" (UID: "c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.782702 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-kube-api-access-q66k2" (OuterVolumeSpecName: "kube-api-access-q66k2") pod "c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1" (UID: "c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1"). InnerVolumeSpecName "kube-api-access-q66k2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.823853 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1" (UID: "c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.839758 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-config-data" (OuterVolumeSpecName: "config-data") pod "c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1" (UID: "c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.873789 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.873820 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.873831 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:41 crc kubenswrapper[4935]: I1201 18:56:41.873839 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q66k2\" (UniqueName: \"kubernetes.io/projected/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1-kube-api-access-q66k2\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.052688 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.272330 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"9af914fc-37f4-4830-9b72-aadeba875772","Type":"ContainerStarted","Data":"195bbc3001d102153062ca8cb1b3323ca1a75f9a1510087153ff2c8f60cc4b64"} Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.278470 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hfpj6" event={"ID":"c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1","Type":"ContainerDied","Data":"9e74066a6643df6edbb0be8a1d2cfec433e4dceb9aa24f2ac48e0bf398ba788b"} Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.278506 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e74066a6643df6edbb0be8a1d2cfec433e4dceb9aa24f2ac48e0bf398ba788b" Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.278532 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hfpj6" Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.383565 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.384052 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-log" containerID="cri-o://10e594f97af0fa8774d501c90c7705fdec6b29c3c079f11790026b9f332a5a06" gracePeriod=30 Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.384138 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-api" containerID="cri-o://c18d43334fa83bebdc71aec1608e41e9c4b55b67d0934790c6df7ab8832e1703" gracePeriod=30 Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.395991 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.396193 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="4cebec77-38c2-47dc-8426-6f6802ab516a" containerName="nova-scheduler-scheduler" containerID="cri-o://201287f5050595e1ade9cc11a16d59788a14bd2f4e2e23e6ada642434b9a2fd8" gracePeriod=30 Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.422241 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.422456 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerName="nova-metadata-log" containerID="cri-o://98f1edcbfa82a426a693078a4a9f591845e9597142e8e3cb4cef4f2822e57abe" gracePeriod=30 Dec 01 18:56:42 crc kubenswrapper[4935]: I1201 18:56:42.422917 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerName="nova-metadata-metadata" containerID="cri-o://0ac698c876e48fd4cf33bfa6919d5ae04cb34157e3039d787094bc8cbcac42c6" gracePeriod=30 Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.293018 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerStarted","Data":"0f3ba03372e14c62035472c683d2a6cea58dea48ceac627866555fdc3a95680a"} Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.294251 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.296403 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"9af914fc-37f4-4830-9b72-aadeba875772","Type":"ContainerStarted","Data":"b54e35535bc95ab2578e9952bcc29fcc035c15b147e8662030f6e1de891ebfad"} Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.296569 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.299411 4935 generic.go:334] "Generic (PLEG): container finished" podID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerID="10e594f97af0fa8774d501c90c7705fdec6b29c3c079f11790026b9f332a5a06" exitCode=143 Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.299477 4935 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2a213c6-43ad-4879-bef7-a2ac48ecd73c","Type":"ContainerDied","Data":"10e594f97af0fa8774d501c90c7705fdec6b29c3c079f11790026b9f332a5a06"} Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.301366 4935 generic.go:334] "Generic (PLEG): container finished" podID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerID="0ac698c876e48fd4cf33bfa6919d5ae04cb34157e3039d787094bc8cbcac42c6" exitCode=0 Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.301394 4935 generic.go:334] "Generic (PLEG): container finished" podID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerID="98f1edcbfa82a426a693078a4a9f591845e9597142e8e3cb4cef4f2822e57abe" exitCode=143 Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.301412 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150","Type":"ContainerDied","Data":"0ac698c876e48fd4cf33bfa6919d5ae04cb34157e3039d787094bc8cbcac42c6"} Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.301431 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150","Type":"ContainerDied","Data":"98f1edcbfa82a426a693078a4a9f591845e9597142e8e3cb4cef4f2822e57abe"} Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.316465 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.219940496 podStartE2EDuration="6.316448502s" podCreationTimestamp="2025-12-01 18:56:37 +0000 UTC" firstStartedPulling="2025-12-01 18:56:38.314434661 +0000 UTC m=+1612.336063920" lastFinishedPulling="2025-12-01 18:56:42.410942667 +0000 UTC m=+1616.432571926" observedRunningTime="2025-12-01 18:56:43.312925481 +0000 UTC m=+1617.334554740" watchObservedRunningTime="2025-12-01 18:56:43.316448502 +0000 UTC m=+1617.338077761" Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.333377 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.333361635 podStartE2EDuration="2.333361635s" podCreationTimestamp="2025-12-01 18:56:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:43.330434612 +0000 UTC m=+1617.352063871" watchObservedRunningTime="2025-12-01 18:56:43.333361635 +0000 UTC m=+1617.354990894" Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.435910 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.436029 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 18:56:43 crc kubenswrapper[4935]: E1201 18:56:43.510258 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="201287f5050595e1ade9cc11a16d59788a14bd2f4e2e23e6ada642434b9a2fd8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 18:56:43 crc kubenswrapper[4935]: E1201 18:56:43.514259 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="201287f5050595e1ade9cc11a16d59788a14bd2f4e2e23e6ada642434b9a2fd8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 18:56:43 crc kubenswrapper[4935]: E1201 18:56:43.516672 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="201287f5050595e1ade9cc11a16d59788a14bd2f4e2e23e6ada642434b9a2fd8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 18:56:43 crc kubenswrapper[4935]: E1201 18:56:43.516731 4935 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="4cebec77-38c2-47dc-8426-6f6802ab516a" containerName="nova-scheduler-scheduler" Dec 01 18:56:43 crc kubenswrapper[4935]: I1201 18:56:43.821175 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-cfnapi-5dc988865-sv2ln" podUID="750735d4-ef3a-4fad-b258-13bd36897efa" containerName="heat-cfnapi" probeResult="failure" output="Get \"http://10.217.0.207:8000/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 18:56:44 crc kubenswrapper[4935]: I1201 18:56:44.320914 4935 generic.go:334] "Generic (PLEG): container finished" podID="4cebec77-38c2-47dc-8426-6f6802ab516a" containerID="201287f5050595e1ade9cc11a16d59788a14bd2f4e2e23e6ada642434b9a2fd8" exitCode=0 Dec 01 18:56:44 crc kubenswrapper[4935]: I1201 18:56:44.320968 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4cebec77-38c2-47dc-8426-6f6802ab516a","Type":"ContainerDied","Data":"201287f5050595e1ade9cc11a16d59788a14bd2f4e2e23e6ada642434b9a2fd8"} Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.206401 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.219318 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.291956 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-combined-ca-bundle\") pod \"4cebec77-38c2-47dc-8426-6f6802ab516a\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.292060 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-config-data\") pod \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.292260 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-logs\") pod \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.292333 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-nova-metadata-tls-certs\") pod \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.292412 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-config-data\") pod \"4cebec77-38c2-47dc-8426-6f6802ab516a\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.292511 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v24mp\" (UniqueName: \"kubernetes.io/projected/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-kube-api-access-v24mp\") pod \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.292620 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-logs" (OuterVolumeSpecName: "logs") pod "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" (UID: "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.292697 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzbnk\" (UniqueName: \"kubernetes.io/projected/4cebec77-38c2-47dc-8426-6f6802ab516a-kube-api-access-nzbnk\") pod \"4cebec77-38c2-47dc-8426-6f6802ab516a\" (UID: \"4cebec77-38c2-47dc-8426-6f6802ab516a\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.299021 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-kube-api-access-v24mp" (OuterVolumeSpecName: "kube-api-access-v24mp") pod "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" (UID: "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150"). InnerVolumeSpecName "kube-api-access-v24mp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.301535 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-combined-ca-bundle\") pod \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\" (UID: \"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.303313 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.303343 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v24mp\" (UniqueName: \"kubernetes.io/projected/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-kube-api-access-v24mp\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.309852 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cebec77-38c2-47dc-8426-6f6802ab516a-kube-api-access-nzbnk" (OuterVolumeSpecName: "kube-api-access-nzbnk") pod "4cebec77-38c2-47dc-8426-6f6802ab516a" (UID: "4cebec77-38c2-47dc-8426-6f6802ab516a"). InnerVolumeSpecName "kube-api-access-nzbnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.338244 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-config-data" (OuterVolumeSpecName: "config-data") pod "4cebec77-38c2-47dc-8426-6f6802ab516a" (UID: "4cebec77-38c2-47dc-8426-6f6802ab516a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.359406 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" (UID: "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.359453 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-config-data" (OuterVolumeSpecName: "config-data") pod "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" (UID: "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.360417 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.360447 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4cebec77-38c2-47dc-8426-6f6802ab516a","Type":"ContainerDied","Data":"7fdd4883fc608061b58725001e405addba1577c112928c9c45894299eb50528c"} Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.360506 4935 scope.go:117] "RemoveContainer" containerID="201287f5050595e1ade9cc11a16d59788a14bd2f4e2e23e6ada642434b9a2fd8" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.365476 4935 generic.go:334] "Generic (PLEG): container finished" podID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerID="c18d43334fa83bebdc71aec1608e41e9c4b55b67d0934790c6df7ab8832e1703" exitCode=0 Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.365550 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2a213c6-43ad-4879-bef7-a2ac48ecd73c","Type":"ContainerDied","Data":"c18d43334fa83bebdc71aec1608e41e9c4b55b67d0934790c6df7ab8832e1703"} Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.369662 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3f03c9a2-be16-4f6c-9585-c4b7b0dbe150","Type":"ContainerDied","Data":"b0f1f0f41aacdd9c892f088b1923b6e5557a7014306a8cb08e9e67942ddd8092"} Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.369733 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.391463 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4cebec77-38c2-47dc-8426-6f6802ab516a" (UID: "4cebec77-38c2-47dc-8426-6f6802ab516a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.406601 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzbnk\" (UniqueName: \"kubernetes.io/projected/4cebec77-38c2-47dc-8426-6f6802ab516a-kube-api-access-nzbnk\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.406632 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.406641 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.406650 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.406659 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cebec77-38c2-47dc-8426-6f6802ab516a-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.407728 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" (UID: "3f03c9a2-be16-4f6c-9585-c4b7b0dbe150"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.520241 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.520340 4935 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.521815 4935 scope.go:117] "RemoveContainer" containerID="0ac698c876e48fd4cf33bfa6919d5ae04cb34157e3039d787094bc8cbcac42c6" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.608161 4935 scope.go:117] "RemoveContainer" containerID="98f1edcbfa82a426a693078a4a9f591845e9597142e8e3cb4cef4f2822e57abe" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.621523 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-config-data\") pod \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.622780 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-logs\") pod \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.622864 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdjl8\" (UniqueName: \"kubernetes.io/projected/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-kube-api-access-hdjl8\") pod \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.623271 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-logs" (OuterVolumeSpecName: "logs") pod "a2a213c6-43ad-4879-bef7-a2ac48ecd73c" (UID: "a2a213c6-43ad-4879-bef7-a2ac48ecd73c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.623873 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-combined-ca-bundle\") pod \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\" (UID: \"a2a213c6-43ad-4879-bef7-a2ac48ecd73c\") " Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.627026 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.629610 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-kube-api-access-hdjl8" (OuterVolumeSpecName: "kube-api-access-hdjl8") pod "a2a213c6-43ad-4879-bef7-a2ac48ecd73c" (UID: "a2a213c6-43ad-4879-bef7-a2ac48ecd73c"). InnerVolumeSpecName "kube-api-access-hdjl8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.654274 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-config-data" (OuterVolumeSpecName: "config-data") pod "a2a213c6-43ad-4879-bef7-a2ac48ecd73c" (UID: "a2a213c6-43ad-4879-bef7-a2ac48ecd73c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.656338 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2a213c6-43ad-4879-bef7-a2ac48ecd73c" (UID: "a2a213c6-43ad-4879-bef7-a2ac48ecd73c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.706100 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.724836 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.728613 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.728639 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdjl8\" (UniqueName: \"kubernetes.io/projected/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-kube-api-access-hdjl8\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.728650 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a213c6-43ad-4879-bef7-a2ac48ecd73c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.742275 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.759045 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769001 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:56:46 crc kubenswrapper[4935]: E1201 18:56:46.769506 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerName="nova-metadata-metadata" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769525 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerName="nova-metadata-metadata" Dec 01 18:56:46 crc kubenswrapper[4935]: E1201 18:56:46.769562 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1" containerName="nova-manage" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769569 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1" containerName="nova-manage" Dec 01 18:56:46 crc kubenswrapper[4935]: E1201 18:56:46.769583 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-api" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 
18:56:46.769589 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-api" Dec 01 18:56:46 crc kubenswrapper[4935]: E1201 18:56:46.769604 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cebec77-38c2-47dc-8426-6f6802ab516a" containerName="nova-scheduler-scheduler" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769609 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cebec77-38c2-47dc-8426-6f6802ab516a" containerName="nova-scheduler-scheduler" Dec 01 18:56:46 crc kubenswrapper[4935]: E1201 18:56:46.769624 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-log" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769630 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-log" Dec 01 18:56:46 crc kubenswrapper[4935]: E1201 18:56:46.769643 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerName="nova-metadata-log" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769648 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerName="nova-metadata-log" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769847 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerName="nova-metadata-metadata" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769856 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-api" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769875 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cebec77-38c2-47dc-8426-6f6802ab516a" containerName="nova-scheduler-scheduler" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769888 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" containerName="nova-metadata-log" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769896 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1" containerName="nova-manage" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.769905 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" containerName="nova-api-log" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.770718 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.773365 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.781585 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.783852 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.787074 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.787076 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.793310 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.809279 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.830374 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.830662 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d84da5cc-8329-4e71-bd5f-0a3db9819952-logs\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.830754 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs52l\" (UniqueName: \"kubernetes.io/projected/d38f760d-4c52-437b-b407-778866305310-kube-api-access-hs52l\") pod \"nova-scheduler-0\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.830888 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-config-data\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.831011 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.831103 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-config-data\") pod \"nova-scheduler-0\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.831216 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddvxs\" (UniqueName: \"kubernetes.io/projected/d84da5cc-8329-4e71-bd5f-0a3db9819952-kube-api-access-ddvxs\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.831349 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.932922 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.932976 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d84da5cc-8329-4e71-bd5f-0a3db9819952-logs\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.933017 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs52l\" (UniqueName: \"kubernetes.io/projected/d38f760d-4c52-437b-b407-778866305310-kube-api-access-hs52l\") pod \"nova-scheduler-0\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.933115 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-config-data\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.933138 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.933192 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-config-data\") pod \"nova-scheduler-0\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.933225 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddvxs\" (UniqueName: \"kubernetes.io/projected/d84da5cc-8329-4e71-bd5f-0a3db9819952-kube-api-access-ddvxs\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.933281 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.933724 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d84da5cc-8329-4e71-bd5f-0a3db9819952-logs\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 
18:56:46.936179 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.936952 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.937026 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-config-data\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.938519 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-config-data\") pod \"nova-scheduler-0\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.942003 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.963413 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs52l\" (UniqueName: \"kubernetes.io/projected/d38f760d-4c52-437b-b407-778866305310-kube-api-access-hs52l\") pod \"nova-scheduler-0\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " pod="openstack/nova-scheduler-0" Dec 01 18:56:46 crc kubenswrapper[4935]: I1201 18:56:46.965321 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddvxs\" (UniqueName: \"kubernetes.io/projected/d84da5cc-8329-4e71-bd5f-0a3db9819952-kube-api-access-ddvxs\") pod \"nova-metadata-0\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " pod="openstack/nova-metadata-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.089194 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.111058 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.382562 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-8dp9s" event={"ID":"2d9754a9-f6c3-405b-ad06-70e432e7eedc","Type":"ContainerStarted","Data":"38634acee11c071d79353a22f3314571d7e6b67e03cc63065a8a043ef3014fab"} Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.392805 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2a213c6-43ad-4879-bef7-a2ac48ecd73c","Type":"ContainerDied","Data":"21e52d3a2005fcbda68c0c527f88e682adf28ab92882c97a9456bb8d5e450b7d"} Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.392897 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.392960 4935 scope.go:117] "RemoveContainer" containerID="c18d43334fa83bebdc71aec1608e41e9c4b55b67d0934790c6df7ab8832e1703" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.436296 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-8dp9s" podStartSLOduration=2.99204809 podStartE2EDuration="9.436273541s" podCreationTimestamp="2025-12-01 18:56:38 +0000 UTC" firstStartedPulling="2025-12-01 18:56:39.59018979 +0000 UTC m=+1613.611819039" lastFinishedPulling="2025-12-01 18:56:46.034415231 +0000 UTC m=+1620.056044490" observedRunningTime="2025-12-01 18:56:47.403007393 +0000 UTC m=+1621.424636652" watchObservedRunningTime="2025-12-01 18:56:47.436273541 +0000 UTC m=+1621.457902820" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.449719 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.464535 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.477427 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.479386 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.482528 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.491738 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.499057 4935 scope.go:117] "RemoveContainer" containerID="10e594f97af0fa8774d501c90c7705fdec6b29c3c079f11790026b9f332a5a06" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.553471 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.555777 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-config-data\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.555947 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ec6049-21a8-4428-b396-3b6604a69b76-logs\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.556131 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6xmg\" (UniqueName: \"kubernetes.io/projected/14ec6049-21a8-4428-b396-3b6604a69b76-kube-api-access-v6xmg\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.638022 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-scheduler-0"] Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.658282 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ec6049-21a8-4428-b396-3b6604a69b76-logs\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.658350 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6xmg\" (UniqueName: \"kubernetes.io/projected/14ec6049-21a8-4428-b396-3b6604a69b76-kube-api-access-v6xmg\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.658391 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.658657 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ec6049-21a8-4428-b396-3b6604a69b76-logs\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.659024 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-config-data\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.667310 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.667365 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-config-data\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.675502 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6xmg\" (UniqueName: \"kubernetes.io/projected/14ec6049-21a8-4428-b396-3b6604a69b76-kube-api-access-v6xmg\") pod \"nova-api-0\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.798966 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:56:47 crc kubenswrapper[4935]: I1201 18:56:47.826541 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:56:47 crc kubenswrapper[4935]: W1201 18:56:47.841984 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd84da5cc_8329_4e71_bd5f_0a3db9819952.slice/crio-a8f1d017350f4b07b64ed8a65e8f089e84100afa10f2e83205be6f18f3790ca1 WatchSource:0}: Error finding container a8f1d017350f4b07b64ed8a65e8f089e84100afa10f2e83205be6f18f3790ca1: Status 404 returned error can't find the container with id a8f1d017350f4b07b64ed8a65e8f089e84100afa10f2e83205be6f18f3790ca1 Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 18:56:48.298985 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:56:48 crc kubenswrapper[4935]: W1201 18:56:48.302886 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14ec6049_21a8_4428_b396_3b6604a69b76.slice/crio-39766c8946bbcdb38e32947c5f922ca86ab743da7a777df37df08a07930cdee9 WatchSource:0}: Error finding container 39766c8946bbcdb38e32947c5f922ca86ab743da7a777df37df08a07930cdee9: Status 404 returned error can't find the container with id 39766c8946bbcdb38e32947c5f922ca86ab743da7a777df37df08a07930cdee9 Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 18:56:48.404822 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14ec6049-21a8-4428-b396-3b6604a69b76","Type":"ContainerStarted","Data":"39766c8946bbcdb38e32947c5f922ca86ab743da7a777df37df08a07930cdee9"} Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 18:56:48.409461 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d38f760d-4c52-437b-b407-778866305310","Type":"ContainerStarted","Data":"7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed"} Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 18:56:48.409505 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d38f760d-4c52-437b-b407-778866305310","Type":"ContainerStarted","Data":"bd99f0aacf35a5943edb8ef623cada9f3471bf05835190575c3b38b00fd9143c"} Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 18:56:48.412864 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d84da5cc-8329-4e71-bd5f-0a3db9819952","Type":"ContainerStarted","Data":"2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04"} Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 18:56:48.412944 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d84da5cc-8329-4e71-bd5f-0a3db9819952","Type":"ContainerStarted","Data":"a8f1d017350f4b07b64ed8a65e8f089e84100afa10f2e83205be6f18f3790ca1"} Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 18:56:48.427568 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.427551878 podStartE2EDuration="2.427551878s" podCreationTimestamp="2025-12-01 18:56:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:48.42635915 +0000 UTC m=+1622.447988419" watchObservedRunningTime="2025-12-01 18:56:48.427551878 +0000 UTC m=+1622.449181137" Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 
18:56:48.527583 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f03c9a2-be16-4f6c-9585-c4b7b0dbe150" path="/var/lib/kubelet/pods/3f03c9a2-be16-4f6c-9585-c4b7b0dbe150/volumes" Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 18:56:48.528524 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cebec77-38c2-47dc-8426-6f6802ab516a" path="/var/lib/kubelet/pods/4cebec77-38c2-47dc-8426-6f6802ab516a/volumes" Dec 01 18:56:48 crc kubenswrapper[4935]: I1201 18:56:48.529064 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2a213c6-43ad-4879-bef7-a2ac48ecd73c" path="/var/lib/kubelet/pods/a2a213c6-43ad-4879-bef7-a2ac48ecd73c/volumes" Dec 01 18:56:49 crc kubenswrapper[4935]: I1201 18:56:49.481972 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14ec6049-21a8-4428-b396-3b6604a69b76","Type":"ContainerStarted","Data":"0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56"} Dec 01 18:56:49 crc kubenswrapper[4935]: I1201 18:56:49.482286 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14ec6049-21a8-4428-b396-3b6604a69b76","Type":"ContainerStarted","Data":"1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4"} Dec 01 18:56:49 crc kubenswrapper[4935]: I1201 18:56:49.486259 4935 generic.go:334] "Generic (PLEG): container finished" podID="2d9754a9-f6c3-405b-ad06-70e432e7eedc" containerID="38634acee11c071d79353a22f3314571d7e6b67e03cc63065a8a043ef3014fab" exitCode=0 Dec 01 18:56:49 crc kubenswrapper[4935]: I1201 18:56:49.486380 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-8dp9s" event={"ID":"2d9754a9-f6c3-405b-ad06-70e432e7eedc","Type":"ContainerDied","Data":"38634acee11c071d79353a22f3314571d7e6b67e03cc63065a8a043ef3014fab"} Dec 01 18:56:49 crc kubenswrapper[4935]: I1201 18:56:49.488719 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d84da5cc-8329-4e71-bd5f-0a3db9819952","Type":"ContainerStarted","Data":"2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312"} Dec 01 18:56:49 crc kubenswrapper[4935]: I1201 18:56:49.592251 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.592223566 podStartE2EDuration="2.592223566s" podCreationTimestamp="2025-12-01 18:56:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:49.589995156 +0000 UTC m=+1623.611624455" watchObservedRunningTime="2025-12-01 18:56:49.592223566 +0000 UTC m=+1623.613852865" Dec 01 18:56:49 crc kubenswrapper[4935]: I1201 18:56:49.645037 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.645008679 podStartE2EDuration="3.645008679s" podCreationTimestamp="2025-12-01 18:56:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:56:49.617763121 +0000 UTC m=+1623.639392420" watchObservedRunningTime="2025-12-01 18:56:49.645008679 +0000 UTC m=+1623.666637978" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.041464 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.147740 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-config-data\") pod \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.147911 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-scripts\") pod \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.147966 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-combined-ca-bundle\") pod \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.148116 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9tt6\" (UniqueName: \"kubernetes.io/projected/2d9754a9-f6c3-405b-ad06-70e432e7eedc-kube-api-access-n9tt6\") pod \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\" (UID: \"2d9754a9-f6c3-405b-ad06-70e432e7eedc\") " Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.155354 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-scripts" (OuterVolumeSpecName: "scripts") pod "2d9754a9-f6c3-405b-ad06-70e432e7eedc" (UID: "2d9754a9-f6c3-405b-ad06-70e432e7eedc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.156137 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d9754a9-f6c3-405b-ad06-70e432e7eedc-kube-api-access-n9tt6" (OuterVolumeSpecName: "kube-api-access-n9tt6") pod "2d9754a9-f6c3-405b-ad06-70e432e7eedc" (UID: "2d9754a9-f6c3-405b-ad06-70e432e7eedc"). InnerVolumeSpecName "kube-api-access-n9tt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.204351 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d9754a9-f6c3-405b-ad06-70e432e7eedc" (UID: "2d9754a9-f6c3-405b-ad06-70e432e7eedc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.206923 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-config-data" (OuterVolumeSpecName: "config-data") pod "2d9754a9-f6c3-405b-ad06-70e432e7eedc" (UID: "2d9754a9-f6c3-405b-ad06-70e432e7eedc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.252478 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9tt6\" (UniqueName: \"kubernetes.io/projected/2d9754a9-f6c3-405b-ad06-70e432e7eedc-kube-api-access-n9tt6\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.252517 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.252531 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.252542 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d9754a9-f6c3-405b-ad06-70e432e7eedc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.523793 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-8dp9s" event={"ID":"2d9754a9-f6c3-405b-ad06-70e432e7eedc","Type":"ContainerDied","Data":"9f411d27838fe809e6c89cc56921304ee13b8d46eadea2ec0c5a2ac253dbb263"} Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.523851 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f411d27838fe809e6c89cc56921304ee13b8d46eadea2ec0c5a2ac253dbb263" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.523896 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-8dp9s" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.561757 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.920906 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 01 18:56:51 crc kubenswrapper[4935]: E1201 18:56:51.921638 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d9754a9-f6c3-405b-ad06-70e432e7eedc" containerName="aodh-db-sync" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.921659 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d9754a9-f6c3-405b-ad06-70e432e7eedc" containerName="aodh-db-sync" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.921945 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d9754a9-f6c3-405b-ad06-70e432e7eedc" containerName="aodh-db-sync" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.924629 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.931797 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-l8dhf" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.932659 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.933053 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.973078 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-config-data\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.973395 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7pdw\" (UniqueName: \"kubernetes.io/projected/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-kube-api-access-r7pdw\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.973439 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-scripts\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.973609 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:51 crc kubenswrapper[4935]: I1201 18:56:51.980802 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.076346 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-config-data\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.076943 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7pdw\" (UniqueName: \"kubernetes.io/projected/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-kube-api-access-r7pdw\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.077420 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-scripts\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.077484 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:52 crc kubenswrapper[4935]: 
I1201 18:56:52.082910 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-scripts\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.083213 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.083270 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-config-data\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.089719 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.092874 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7pdw\" (UniqueName: \"kubernetes.io/projected/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-kube-api-access-r7pdw\") pod \"aodh-0\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " pod="openstack/aodh-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.113307 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.113427 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.256494 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 01 18:56:52 crc kubenswrapper[4935]: I1201 18:56:52.770947 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 01 18:56:52 crc kubenswrapper[4935]: W1201 18:56:52.797678 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf98ab763_d98c_4ad6_bc2e_a943c33b43fc.slice/crio-758c94a70926137495fc9195079092e3f19c1f7c1d799824cd5a9ec66654f46e WatchSource:0}: Error finding container 758c94a70926137495fc9195079092e3f19c1f7c1d799824cd5a9ec66654f46e: Status 404 returned error can't find the container with id 758c94a70926137495fc9195079092e3f19c1f7c1d799824cd5a9ec66654f46e Dec 01 18:56:53 crc kubenswrapper[4935]: I1201 18:56:53.569896 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerStarted","Data":"758c94a70926137495fc9195079092e3f19c1f7c1d799824cd5a9ec66654f46e"} Dec 01 18:56:55 crc kubenswrapper[4935]: I1201 18:56:55.369438 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 01 18:56:55 crc kubenswrapper[4935]: I1201 18:56:55.508879 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:56:55 crc kubenswrapper[4935]: E1201 18:56:55.509640 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:56:55 crc kubenswrapper[4935]: I1201 18:56:55.591691 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerStarted","Data":"4d599b685d5849424ae4b6b625284b667dfe1d7330621fb0468a0340e8587d2c"} Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.034766 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.035278 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="ceilometer-central-agent" containerID="cri-o://6f32732461324016276ca8cc47de8a194981e73f53cdb3d72e801103b383658a" gracePeriod=30 Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.035795 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="proxy-httpd" containerID="cri-o://0f3ba03372e14c62035472c683d2a6cea58dea48ceac627866555fdc3a95680a" gracePeriod=30 Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.035818 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="sg-core" containerID="cri-o://b99a8d17e1142106c6866ae551f731420401a8ee4745bf7aab7e51291d057920" gracePeriod=30 Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.035872 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53308b1c-fda7-40fe-b113-6784940740d9" 
containerName="ceilometer-notification-agent" containerID="cri-o://14dc81d1e49832d8e41343ea5639a060f501f4bc6160210bfa069b066a5a043b" gracePeriod=30 Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.053107 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.240:3000/\": EOF" Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.603558 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerStarted","Data":"015bd3e6734d860364fe741192bd980d244211e23c4caca02c351ebad437f6f0"} Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.608046 4935 generic.go:334] "Generic (PLEG): container finished" podID="53308b1c-fda7-40fe-b113-6784940740d9" containerID="0f3ba03372e14c62035472c683d2a6cea58dea48ceac627866555fdc3a95680a" exitCode=0 Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.608084 4935 generic.go:334] "Generic (PLEG): container finished" podID="53308b1c-fda7-40fe-b113-6784940740d9" containerID="b99a8d17e1142106c6866ae551f731420401a8ee4745bf7aab7e51291d057920" exitCode=2 Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.608103 4935 generic.go:334] "Generic (PLEG): container finished" podID="53308b1c-fda7-40fe-b113-6784940740d9" containerID="6f32732461324016276ca8cc47de8a194981e73f53cdb3d72e801103b383658a" exitCode=0 Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.608143 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerDied","Data":"0f3ba03372e14c62035472c683d2a6cea58dea48ceac627866555fdc3a95680a"} Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.608252 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerDied","Data":"b99a8d17e1142106c6866ae551f731420401a8ee4745bf7aab7e51291d057920"} Dec 01 18:56:56 crc kubenswrapper[4935]: I1201 18:56:56.608273 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerDied","Data":"6f32732461324016276ca8cc47de8a194981e73f53cdb3d72e801103b383658a"} Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.090328 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.111672 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.111714 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.139683 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.621563 4935 generic.go:334] "Generic (PLEG): container finished" podID="53308b1c-fda7-40fe-b113-6784940740d9" containerID="14dc81d1e49832d8e41343ea5639a060f501f4bc6160210bfa069b066a5a043b" exitCode=0 Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.621649 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerDied","Data":"14dc81d1e49832d8e41343ea5639a060f501f4bc6160210bfa069b066a5a043b"} Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.662809 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.799850 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.800107 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 18:56:57 crc kubenswrapper[4935]: I1201 18:56:57.986825 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.131301 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.245:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.131570 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.245:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.146436 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-scripts\") pod \"53308b1c-fda7-40fe-b113-6784940740d9\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.146710 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-config-data\") pod \"53308b1c-fda7-40fe-b113-6784940740d9\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.146773 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-combined-ca-bundle\") pod \"53308b1c-fda7-40fe-b113-6784940740d9\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.146935 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-log-httpd\") pod \"53308b1c-fda7-40fe-b113-6784940740d9\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.146962 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-run-httpd\") pod \"53308b1c-fda7-40fe-b113-6784940740d9\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.147388 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-sg-core-conf-yaml\") pod \"53308b1c-fda7-40fe-b113-6784940740d9\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.147401 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "53308b1c-fda7-40fe-b113-6784940740d9" (UID: "53308b1c-fda7-40fe-b113-6784940740d9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.147451 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwfpg\" (UniqueName: \"kubernetes.io/projected/53308b1c-fda7-40fe-b113-6784940740d9-kube-api-access-zwfpg\") pod \"53308b1c-fda7-40fe-b113-6784940740d9\" (UID: \"53308b1c-fda7-40fe-b113-6784940740d9\") " Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.147464 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "53308b1c-fda7-40fe-b113-6784940740d9" (UID: "53308b1c-fda7-40fe-b113-6784940740d9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.148328 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.148363 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53308b1c-fda7-40fe-b113-6784940740d9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.176208 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-scripts" (OuterVolumeSpecName: "scripts") pod "53308b1c-fda7-40fe-b113-6784940740d9" (UID: "53308b1c-fda7-40fe-b113-6784940740d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.180431 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53308b1c-fda7-40fe-b113-6784940740d9-kube-api-access-zwfpg" (OuterVolumeSpecName: "kube-api-access-zwfpg") pod "53308b1c-fda7-40fe-b113-6784940740d9" (UID: "53308b1c-fda7-40fe-b113-6784940740d9"). InnerVolumeSpecName "kube-api-access-zwfpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.187230 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "53308b1c-fda7-40fe-b113-6784940740d9" (UID: "53308b1c-fda7-40fe-b113-6784940740d9"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.245574 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53308b1c-fda7-40fe-b113-6784940740d9" (UID: "53308b1c-fda7-40fe-b113-6784940740d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.250187 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwfpg\" (UniqueName: \"kubernetes.io/projected/53308b1c-fda7-40fe-b113-6784940740d9-kube-api-access-zwfpg\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.250367 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.250425 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.250560 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.309259 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-config-data" (OuterVolumeSpecName: "config-data") pod "53308b1c-fda7-40fe-b113-6784940740d9" (UID: "53308b1c-fda7-40fe-b113-6784940740d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.352242 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53308b1c-fda7-40fe-b113-6784940740d9-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.637963 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53308b1c-fda7-40fe-b113-6784940740d9","Type":"ContainerDied","Data":"dcd23b863790e5e53e3531c119182d89309cd54a69af499edc2d138246deb1bb"} Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.637996 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.638039 4935 scope.go:117] "RemoveContainer" containerID="0f3ba03372e14c62035472c683d2a6cea58dea48ceac627866555fdc3a95680a" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.640895 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerStarted","Data":"415fbfc5d1f5f339b420457e74ae6b4efa8aa1cc05473e935e0db9969571d772"} Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.667202 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.681269 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.700823 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:58 crc kubenswrapper[4935]: E1201 18:56:58.701335 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="ceilometer-notification-agent" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.701349 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="ceilometer-notification-agent" Dec 01 18:56:58 crc kubenswrapper[4935]: E1201 18:56:58.701371 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="sg-core" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.701378 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="sg-core" Dec 01 18:56:58 crc kubenswrapper[4935]: E1201 18:56:58.701394 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="proxy-httpd" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.701401 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="proxy-httpd" Dec 01 18:56:58 crc kubenswrapper[4935]: E1201 18:56:58.701419 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="ceilometer-central-agent" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.701425 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="ceilometer-central-agent" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.701671 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="ceilometer-central-agent" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.701729 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="proxy-httpd" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.701742 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="ceilometer-notification-agent" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.701753 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="53308b1c-fda7-40fe-b113-6784940740d9" containerName="sg-core" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.705805 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.709119 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.709307 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.729699 4935 scope.go:117] "RemoveContainer" containerID="b99a8d17e1142106c6866ae551f731420401a8ee4745bf7aab7e51291d057920" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.737671 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.761104 4935 scope.go:117] "RemoveContainer" containerID="14dc81d1e49832d8e41343ea5639a060f501f4bc6160210bfa069b066a5a043b" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.781655 4935 scope.go:117] "RemoveContainer" containerID="6f32732461324016276ca8cc47de8a194981e73f53cdb3d72e801103b383658a" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.862821 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzlpm\" (UniqueName: \"kubernetes.io/projected/5d343d91-86b0-48cc-b85d-fa7732e5ef47-kube-api-access-rzlpm\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.863257 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-run-httpd\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.863514 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-config-data\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.863905 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.864017 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-scripts\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.864067 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.864266 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-log-httpd\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.884379 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.246:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.884382 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.246:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.967503 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-log-httpd\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.967713 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzlpm\" (UniqueName: \"kubernetes.io/projected/5d343d91-86b0-48cc-b85d-fa7732e5ef47-kube-api-access-rzlpm\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.967870 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-run-httpd\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.967983 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-config-data\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.968183 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.968260 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-scripts\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.968289 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.968987 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-run-httpd\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.969191 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-log-httpd\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.973722 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-scripts\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.974213 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.976115 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-config-data\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.989382 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzlpm\" (UniqueName: \"kubernetes.io/projected/5d343d91-86b0-48cc-b85d-fa7732e5ef47-kube-api-access-rzlpm\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:58 crc kubenswrapper[4935]: I1201 18:56:58.989552 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " pod="openstack/ceilometer-0" Dec 01 18:56:59 crc kubenswrapper[4935]: I1201 18:56:59.046041 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:56:59 crc kubenswrapper[4935]: I1201 18:56:59.534853 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:56:59 crc kubenswrapper[4935]: W1201 18:56:59.733827 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d343d91_86b0_48cc_b85d_fa7732e5ef47.slice/crio-86ca72493c6a4e2aef52bfb71dafebfc7cd94a9b459310a70e055008572fbfb4 WatchSource:0}: Error finding container 86ca72493c6a4e2aef52bfb71dafebfc7cd94a9b459310a70e055008572fbfb4: Status 404 returned error can't find the container with id 86ca72493c6a4e2aef52bfb71dafebfc7cd94a9b459310a70e055008572fbfb4 Dec 01 18:57:00 crc kubenswrapper[4935]: I1201 18:57:00.522551 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53308b1c-fda7-40fe-b113-6784940740d9" path="/var/lib/kubelet/pods/53308b1c-fda7-40fe-b113-6784940740d9/volumes" Dec 01 18:57:00 crc kubenswrapper[4935]: I1201 18:57:00.679184 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerStarted","Data":"86ca72493c6a4e2aef52bfb71dafebfc7cd94a9b459310a70e055008572fbfb4"} Dec 01 18:57:00 crc kubenswrapper[4935]: I1201 18:57:00.690010 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerStarted","Data":"9346e8bfbc1d2b891d3134fba4cea1b121e0a29b099d098b7ca194e78e56a1ca"} Dec 01 18:57:00 crc kubenswrapper[4935]: I1201 18:57:00.690300 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-api" containerID="cri-o://4d599b685d5849424ae4b6b625284b667dfe1d7330621fb0468a0340e8587d2c" gracePeriod=30 Dec 01 18:57:00 crc kubenswrapper[4935]: I1201 18:57:00.690388 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-listener" containerID="cri-o://9346e8bfbc1d2b891d3134fba4cea1b121e0a29b099d098b7ca194e78e56a1ca" gracePeriod=30 Dec 01 18:57:00 crc kubenswrapper[4935]: I1201 18:57:00.690451 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-notifier" containerID="cri-o://415fbfc5d1f5f339b420457e74ae6b4efa8aa1cc05473e935e0db9969571d772" gracePeriod=30 Dec 01 18:57:00 crc kubenswrapper[4935]: I1201 18:57:00.690528 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-evaluator" containerID="cri-o://015bd3e6734d860364fe741192bd980d244211e23c4caca02c351ebad437f6f0" gracePeriod=30 Dec 01 18:57:00 crc kubenswrapper[4935]: I1201 18:57:00.742824 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.766697348 podStartE2EDuration="9.742799404s" podCreationTimestamp="2025-12-01 18:56:51 +0000 UTC" firstStartedPulling="2025-12-01 18:56:52.806895373 +0000 UTC m=+1626.828524632" lastFinishedPulling="2025-12-01 18:56:59.782997429 +0000 UTC m=+1633.804626688" observedRunningTime="2025-12-01 18:57:00.710906999 +0000 UTC m=+1634.732536288" watchObservedRunningTime="2025-12-01 18:57:00.742799404 +0000 UTC m=+1634.764428673" Dec 01 
18:57:01 crc kubenswrapper[4935]: I1201 18:57:01.706554 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerStarted","Data":"f6be3bfc1d75dae7678d8fc953cd3824b238b95c4fe2e5a4c277777b43e575a1"} Dec 01 18:57:01 crc kubenswrapper[4935]: I1201 18:57:01.717162 4935 generic.go:334] "Generic (PLEG): container finished" podID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerID="015bd3e6734d860364fe741192bd980d244211e23c4caca02c351ebad437f6f0" exitCode=0 Dec 01 18:57:01 crc kubenswrapper[4935]: I1201 18:57:01.717182 4935 generic.go:334] "Generic (PLEG): container finished" podID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerID="4d599b685d5849424ae4b6b625284b667dfe1d7330621fb0468a0340e8587d2c" exitCode=0 Dec 01 18:57:01 crc kubenswrapper[4935]: I1201 18:57:01.717403 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerDied","Data":"015bd3e6734d860364fe741192bd980d244211e23c4caca02c351ebad437f6f0"} Dec 01 18:57:01 crc kubenswrapper[4935]: I1201 18:57:01.717432 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerDied","Data":"4d599b685d5849424ae4b6b625284b667dfe1d7330621fb0468a0340e8587d2c"} Dec 01 18:57:02 crc kubenswrapper[4935]: I1201 18:57:02.746718 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerStarted","Data":"93f3101e455850c4add6f1f62a151c95d4ccbbab3cc9b6c3e647f7afd4763979"} Dec 01 18:57:03 crc kubenswrapper[4935]: I1201 18:57:03.765039 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerStarted","Data":"a0f0f859551f1d13e0697eefb8df301306c38f57554742a889c212d272cb3764"} Dec 01 18:57:04 crc kubenswrapper[4935]: I1201 18:57:04.781038 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerStarted","Data":"591716c8dd63cbfa49de5ae3c3e787834f68b0e939f0af5da8a47b3468bd651e"} Dec 01 18:57:05 crc kubenswrapper[4935]: I1201 18:57:05.795833 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:57:05 crc kubenswrapper[4935]: I1201 18:57:05.831863 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.337400885 podStartE2EDuration="7.831834625s" podCreationTimestamp="2025-12-01 18:56:58 +0000 UTC" firstStartedPulling="2025-12-01 18:56:59.736835905 +0000 UTC m=+1633.758465164" lastFinishedPulling="2025-12-01 18:57:04.231269615 +0000 UTC m=+1638.252898904" observedRunningTime="2025-12-01 18:57:05.820103445 +0000 UTC m=+1639.841732734" watchObservedRunningTime="2025-12-01 18:57:05.831834625 +0000 UTC m=+1639.853463924" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.520990 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.606996 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-combined-ca-bundle\") pod \"b4333c8e-1218-41bd-9323-dbd9372366fd\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.607442 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv5pr\" (UniqueName: \"kubernetes.io/projected/b4333c8e-1218-41bd-9323-dbd9372366fd-kube-api-access-kv5pr\") pod \"b4333c8e-1218-41bd-9323-dbd9372366fd\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.607561 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-config-data\") pod \"b4333c8e-1218-41bd-9323-dbd9372366fd\" (UID: \"b4333c8e-1218-41bd-9323-dbd9372366fd\") " Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.613285 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4333c8e-1218-41bd-9323-dbd9372366fd-kube-api-access-kv5pr" (OuterVolumeSpecName: "kube-api-access-kv5pr") pod "b4333c8e-1218-41bd-9323-dbd9372366fd" (UID: "b4333c8e-1218-41bd-9323-dbd9372366fd"). InnerVolumeSpecName "kube-api-access-kv5pr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.639739 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4333c8e-1218-41bd-9323-dbd9372366fd" (UID: "b4333c8e-1218-41bd-9323-dbd9372366fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.659344 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-config-data" (OuterVolumeSpecName: "config-data") pod "b4333c8e-1218-41bd-9323-dbd9372366fd" (UID: "b4333c8e-1218-41bd-9323-dbd9372366fd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.712098 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.712136 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv5pr\" (UniqueName: \"kubernetes.io/projected/b4333c8e-1218-41bd-9323-dbd9372366fd-kube-api-access-kv5pr\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.712175 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4333c8e-1218-41bd-9323-dbd9372366fd-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.804862 4935 generic.go:334] "Generic (PLEG): container finished" podID="b4333c8e-1218-41bd-9323-dbd9372366fd" containerID="31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad" exitCode=137 Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.804931 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.804939 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b4333c8e-1218-41bd-9323-dbd9372366fd","Type":"ContainerDied","Data":"31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad"} Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.804973 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b4333c8e-1218-41bd-9323-dbd9372366fd","Type":"ContainerDied","Data":"fca8711b756674966a659fc48daf7524ffffc90f661c4451b08b3aff2eb52b47"} Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.804990 4935 scope.go:117] "RemoveContainer" containerID="31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.851021 4935 scope.go:117] "RemoveContainer" containerID="31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad" Dec 01 18:57:06 crc kubenswrapper[4935]: E1201 18:57:06.851643 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad\": container with ID starting with 31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad not found: ID does not exist" containerID="31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.851687 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad"} err="failed to get container status \"31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad\": rpc error: code = NotFound desc = could not find container \"31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad\": container with ID starting with 31501ef3bdf67ee6263e4d2a8791cb158ae4e02e6ad2602d1c33ab6ebd79c4ad not found: ID does not exist" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.854316 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 
18:57:06.864911 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.885094 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 18:57:06 crc kubenswrapper[4935]: E1201 18:57:06.885760 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4333c8e-1218-41bd-9323-dbd9372366fd" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.885784 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4333c8e-1218-41bd-9323-dbd9372366fd" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.886121 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4333c8e-1218-41bd-9323-dbd9372366fd" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.887269 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.890379 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.890572 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.892294 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.896996 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.918408 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.918745 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.918811 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.918903 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdzk5\" (UniqueName: \"kubernetes.io/projected/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-kube-api-access-wdzk5\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:06 crc kubenswrapper[4935]: I1201 18:57:06.919118 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.021406 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.021462 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.021551 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdzk5\" (UniqueName: \"kubernetes.io/projected/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-kube-api-access-wdzk5\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.021646 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.021676 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.026418 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.027254 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.032681 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.034660 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.037076 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdzk5\" (UniqueName: \"kubernetes.io/projected/ff66b34c-0f33-4ac3-a71d-0470cd0b8517-kube-api-access-wdzk5\") pod \"nova-cell1-novncproxy-0\" (UID: \"ff66b34c-0f33-4ac3-a71d-0470cd0b8517\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.117107 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.119029 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.131751 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.248334 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.508054 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:57:07 crc kubenswrapper[4935]: E1201 18:57:07.508545 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.739440 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.807051 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.807538 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.807799 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.812305 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.824212 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ff66b34c-0f33-4ac3-a71d-0470cd0b8517","Type":"ContainerStarted","Data":"4d28a289cb69d0c89fb1a67e75d2a41bc65603565330f662e43c2afa950835be"} Dec 01 18:57:07 crc kubenswrapper[4935]: I1201 18:57:07.846087 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 18:57:08 crc kubenswrapper[4935]: I1201 18:57:08.525655 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4333c8e-1218-41bd-9323-dbd9372366fd" path="/var/lib/kubelet/pods/b4333c8e-1218-41bd-9323-dbd9372366fd/volumes" Dec 01 18:57:08 crc kubenswrapper[4935]: I1201 
18:57:08.854811 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ff66b34c-0f33-4ac3-a71d-0470cd0b8517","Type":"ContainerStarted","Data":"fd9cb724731da70053cddd64b57ef1282ce87efec5bb9d6ea82bd1c633506beb"} Dec 01 18:57:08 crc kubenswrapper[4935]: I1201 18:57:08.855853 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 18:57:08 crc kubenswrapper[4935]: I1201 18:57:08.864464 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 18:57:08 crc kubenswrapper[4935]: I1201 18:57:08.888765 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.888745651 podStartE2EDuration="2.888745651s" podCreationTimestamp="2025-12-01 18:57:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:57:08.880245964 +0000 UTC m=+1642.901875233" watchObservedRunningTime="2025-12-01 18:57:08.888745651 +0000 UTC m=+1642.910374920" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.079094 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f84f9ccf-6ht5k"] Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.081656 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.098231 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f84f9ccf-6ht5k"] Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.118521 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-svc\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.118856 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-sb\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.118995 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-swift-storage-0\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.119181 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-config\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.119390 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-nb\") pod 
\"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.119513 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt8n7\" (UniqueName: \"kubernetes.io/projected/31708c3f-f9b6-44bd-8d81-cee4bc817f49-kube-api-access-jt8n7\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.221166 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-sb\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.221418 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-swift-storage-0\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.221522 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-config\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.221668 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt8n7\" (UniqueName: \"kubernetes.io/projected/31708c3f-f9b6-44bd-8d81-cee4bc817f49-kube-api-access-jt8n7\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.221740 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-nb\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.221904 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-svc\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.222213 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-sb\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.222809 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-svc\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " 
pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.223032 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-swift-storage-0\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.224493 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-config\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.224613 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-nb\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.241625 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt8n7\" (UniqueName: \"kubernetes.io/projected/31708c3f-f9b6-44bd-8d81-cee4bc817f49-kube-api-access-jt8n7\") pod \"dnsmasq-dns-f84f9ccf-6ht5k\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.437137 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:09 crc kubenswrapper[4935]: I1201 18:57:09.924272 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f84f9ccf-6ht5k"] Dec 01 18:57:10 crc kubenswrapper[4935]: I1201 18:57:10.873402 4935 generic.go:334] "Generic (PLEG): container finished" podID="31708c3f-f9b6-44bd-8d81-cee4bc817f49" containerID="9bf5d108cd2b7f51042cd99fcfb70b422597ab82bc70fea22542ea046dafe726" exitCode=0 Dec 01 18:57:10 crc kubenswrapper[4935]: I1201 18:57:10.873565 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" event={"ID":"31708c3f-f9b6-44bd-8d81-cee4bc817f49","Type":"ContainerDied","Data":"9bf5d108cd2b7f51042cd99fcfb70b422597ab82bc70fea22542ea046dafe726"} Dec 01 18:57:10 crc kubenswrapper[4935]: I1201 18:57:10.873773 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" event={"ID":"31708c3f-f9b6-44bd-8d81-cee4bc817f49","Type":"ContainerStarted","Data":"c34269e63c0256e13f44f2a5051289d5603dfc7dc8d40bc02456bbea6441a514"} Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.485487 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.706359 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.706711 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="ceilometer-central-agent" containerID="cri-o://f6be3bfc1d75dae7678d8fc953cd3824b238b95c4fe2e5a4c277777b43e575a1" gracePeriod=30 Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.706837 4935 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openstack/ceilometer-0" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="sg-core" containerID="cri-o://a0f0f859551f1d13e0697eefb8df301306c38f57554742a889c212d272cb3764" gracePeriod=30 Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.706877 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="ceilometer-notification-agent" containerID="cri-o://93f3101e455850c4add6f1f62a151c95d4ccbbab3cc9b6c3e647f7afd4763979" gracePeriod=30 Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.707111 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="proxy-httpd" containerID="cri-o://591716c8dd63cbfa49de5ae3c3e787834f68b0e939f0af5da8a47b3468bd651e" gracePeriod=30 Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.890959 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" event={"ID":"31708c3f-f9b6-44bd-8d81-cee4bc817f49","Type":"ContainerStarted","Data":"a41aead54c79546f03894f72d0f8b7afbfd8b5220d945ac3bbb5b7fd7dbe5bbc"} Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.891423 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.896588 4935 generic.go:334] "Generic (PLEG): container finished" podID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerID="591716c8dd63cbfa49de5ae3c3e787834f68b0e939f0af5da8a47b3468bd651e" exitCode=0 Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.896656 4935 generic.go:334] "Generic (PLEG): container finished" podID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerID="a0f0f859551f1d13e0697eefb8df301306c38f57554742a889c212d272cb3764" exitCode=2 Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.896630 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerDied","Data":"591716c8dd63cbfa49de5ae3c3e787834f68b0e939f0af5da8a47b3468bd651e"} Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.896752 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerDied","Data":"a0f0f859551f1d13e0697eefb8df301306c38f57554742a889c212d272cb3764"} Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.896928 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-log" containerID="cri-o://1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4" gracePeriod=30 Dec 01 18:57:11 crc kubenswrapper[4935]: I1201 18:57:11.897001 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-api" containerID="cri-o://0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56" gracePeriod=30 Dec 01 18:57:12 crc kubenswrapper[4935]: I1201 18:57:12.248652 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:12 crc kubenswrapper[4935]: I1201 18:57:12.922779 4935 generic.go:334] "Generic (PLEG): container finished" podID="14ec6049-21a8-4428-b396-3b6604a69b76" 
containerID="1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4" exitCode=143 Dec 01 18:57:12 crc kubenswrapper[4935]: I1201 18:57:12.922843 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14ec6049-21a8-4428-b396-3b6604a69b76","Type":"ContainerDied","Data":"1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4"} Dec 01 18:57:12 crc kubenswrapper[4935]: I1201 18:57:12.933203 4935 generic.go:334] "Generic (PLEG): container finished" podID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerID="f6be3bfc1d75dae7678d8fc953cd3824b238b95c4fe2e5a4c277777b43e575a1" exitCode=0 Dec 01 18:57:12 crc kubenswrapper[4935]: I1201 18:57:12.933308 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerDied","Data":"f6be3bfc1d75dae7678d8fc953cd3824b238b95c4fe2e5a4c277777b43e575a1"} Dec 01 18:57:13 crc kubenswrapper[4935]: I1201 18:57:13.969327 4935 generic.go:334] "Generic (PLEG): container finished" podID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerID="93f3101e455850c4add6f1f62a151c95d4ccbbab3cc9b6c3e647f7afd4763979" exitCode=0 Dec 01 18:57:13 crc kubenswrapper[4935]: I1201 18:57:13.969616 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerDied","Data":"93f3101e455850c4add6f1f62a151c95d4ccbbab3cc9b6c3e647f7afd4763979"} Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.168582 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.195839 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" podStartSLOduration=5.195821272 podStartE2EDuration="5.195821272s" podCreationTimestamp="2025-12-01 18:57:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:57:11.923796309 +0000 UTC m=+1645.945425568" watchObservedRunningTime="2025-12-01 18:57:14.195821272 +0000 UTC m=+1648.217450541" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.284266 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-config-data\") pod \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.284335 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-scripts\") pod \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.284417 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-sg-core-conf-yaml\") pod \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.284564 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-run-httpd\") pod 
\"5d343d91-86b0-48cc-b85d-fa7732e5ef47\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.284634 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-log-httpd\") pod \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.284800 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-combined-ca-bundle\") pod \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.284831 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzlpm\" (UniqueName: \"kubernetes.io/projected/5d343d91-86b0-48cc-b85d-fa7732e5ef47-kube-api-access-rzlpm\") pod \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\" (UID: \"5d343d91-86b0-48cc-b85d-fa7732e5ef47\") " Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.285136 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5d343d91-86b0-48cc-b85d-fa7732e5ef47" (UID: "5d343d91-86b0-48cc-b85d-fa7732e5ef47"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.285383 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5d343d91-86b0-48cc-b85d-fa7732e5ef47" (UID: "5d343d91-86b0-48cc-b85d-fa7732e5ef47"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.285898 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.285926 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d343d91-86b0-48cc-b85d-fa7732e5ef47-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.293002 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d343d91-86b0-48cc-b85d-fa7732e5ef47-kube-api-access-rzlpm" (OuterVolumeSpecName: "kube-api-access-rzlpm") pod "5d343d91-86b0-48cc-b85d-fa7732e5ef47" (UID: "5d343d91-86b0-48cc-b85d-fa7732e5ef47"). InnerVolumeSpecName "kube-api-access-rzlpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.305304 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-scripts" (OuterVolumeSpecName: "scripts") pod "5d343d91-86b0-48cc-b85d-fa7732e5ef47" (UID: "5d343d91-86b0-48cc-b85d-fa7732e5ef47"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.318111 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5d343d91-86b0-48cc-b85d-fa7732e5ef47" (UID: "5d343d91-86b0-48cc-b85d-fa7732e5ef47"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.389259 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.389303 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzlpm\" (UniqueName: \"kubernetes.io/projected/5d343d91-86b0-48cc-b85d-fa7732e5ef47-kube-api-access-rzlpm\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.389324 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.395574 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d343d91-86b0-48cc-b85d-fa7732e5ef47" (UID: "5d343d91-86b0-48cc-b85d-fa7732e5ef47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.456957 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-config-data" (OuterVolumeSpecName: "config-data") pod "5d343d91-86b0-48cc-b85d-fa7732e5ef47" (UID: "5d343d91-86b0-48cc-b85d-fa7732e5ef47"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.491558 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.491608 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d343d91-86b0-48cc-b85d-fa7732e5ef47-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.987702 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5d343d91-86b0-48cc-b85d-fa7732e5ef47","Type":"ContainerDied","Data":"86ca72493c6a4e2aef52bfb71dafebfc7cd94a9b459310a70e055008572fbfb4"} Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.987795 4935 scope.go:117] "RemoveContainer" containerID="591716c8dd63cbfa49de5ae3c3e787834f68b0e939f0af5da8a47b3468bd651e" Dec 01 18:57:14 crc kubenswrapper[4935]: I1201 18:57:14.987798 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.018977 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.024434 4935 scope.go:117] "RemoveContainer" containerID="a0f0f859551f1d13e0697eefb8df301306c38f57554742a889c212d272cb3764" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.028027 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.054638 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:15 crc kubenswrapper[4935]: E1201 18:57:15.055561 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="ceilometer-central-agent" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.055586 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="ceilometer-central-agent" Dec 01 18:57:15 crc kubenswrapper[4935]: E1201 18:57:15.055658 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="sg-core" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.055669 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="sg-core" Dec 01 18:57:15 crc kubenswrapper[4935]: E1201 18:57:15.055722 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="ceilometer-notification-agent" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.055732 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="ceilometer-notification-agent" Dec 01 18:57:15 crc kubenswrapper[4935]: E1201 18:57:15.055748 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="proxy-httpd" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.055755 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="proxy-httpd" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.056337 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="proxy-httpd" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.056368 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="ceilometer-central-agent" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.056386 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="ceilometer-notification-agent" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.056422 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" containerName="sg-core" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.059587 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.060721 4935 scope.go:117] "RemoveContainer" containerID="93f3101e455850c4add6f1f62a151c95d4ccbbab3cc9b6c3e647f7afd4763979" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.063577 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.063765 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.070508 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.098763 4935 scope.go:117] "RemoveContainer" containerID="f6be3bfc1d75dae7678d8fc953cd3824b238b95c4fe2e5a4c277777b43e575a1" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.108223 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.108262 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-run-httpd\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.108293 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-scripts\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.108468 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-config-data\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.108623 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.108812 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8txbn\" (UniqueName: \"kubernetes.io/projected/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-kube-api-access-8txbn\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.108935 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-log-httpd\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 
18:57:15.211053 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.211481 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-run-httpd\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.211516 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-scripts\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.211560 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-config-data\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.211652 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.211699 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8txbn\" (UniqueName: \"kubernetes.io/projected/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-kube-api-access-8txbn\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.211760 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-log-httpd\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.211976 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-run-httpd\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.212328 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-log-httpd\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.220421 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.223215 4935 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-config-data\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.223812 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-scripts\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.225311 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.230072 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8txbn\" (UniqueName: \"kubernetes.io/projected/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-kube-api-access-8txbn\") pod \"ceilometer-0\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.392875 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.478126 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.518133 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6xmg\" (UniqueName: \"kubernetes.io/projected/14ec6049-21a8-4428-b396-3b6604a69b76-kube-api-access-v6xmg\") pod \"14ec6049-21a8-4428-b396-3b6604a69b76\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.518804 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-config-data\") pod \"14ec6049-21a8-4428-b396-3b6604a69b76\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.518927 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ec6049-21a8-4428-b396-3b6604a69b76-logs\") pod \"14ec6049-21a8-4428-b396-3b6604a69b76\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.519023 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-combined-ca-bundle\") pod \"14ec6049-21a8-4428-b396-3b6604a69b76\" (UID: \"14ec6049-21a8-4428-b396-3b6604a69b76\") " Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.524968 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14ec6049-21a8-4428-b396-3b6604a69b76-logs" (OuterVolumeSpecName: "logs") pod "14ec6049-21a8-4428-b396-3b6604a69b76" (UID: "14ec6049-21a8-4428-b396-3b6604a69b76"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.541623 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14ec6049-21a8-4428-b396-3b6604a69b76-kube-api-access-v6xmg" (OuterVolumeSpecName: "kube-api-access-v6xmg") pod "14ec6049-21a8-4428-b396-3b6604a69b76" (UID: "14ec6049-21a8-4428-b396-3b6604a69b76"). InnerVolumeSpecName "kube-api-access-v6xmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.576039 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14ec6049-21a8-4428-b396-3b6604a69b76" (UID: "14ec6049-21a8-4428-b396-3b6604a69b76"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.622443 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6xmg\" (UniqueName: \"kubernetes.io/projected/14ec6049-21a8-4428-b396-3b6604a69b76-kube-api-access-v6xmg\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.622477 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14ec6049-21a8-4428-b396-3b6604a69b76-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.622489 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.624545 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-config-data" (OuterVolumeSpecName: "config-data") pod "14ec6049-21a8-4428-b396-3b6604a69b76" (UID: "14ec6049-21a8-4428-b396-3b6604a69b76"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.726394 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ec6049-21a8-4428-b396-3b6604a69b76-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:15 crc kubenswrapper[4935]: I1201 18:57:15.924334 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.006102 4935 generic.go:334] "Generic (PLEG): container finished" podID="14ec6049-21a8-4428-b396-3b6604a69b76" containerID="0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56" exitCode=0 Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.006186 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14ec6049-21a8-4428-b396-3b6604a69b76","Type":"ContainerDied","Data":"0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56"} Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.006239 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14ec6049-21a8-4428-b396-3b6604a69b76","Type":"ContainerDied","Data":"39766c8946bbcdb38e32947c5f922ca86ab743da7a777df37df08a07930cdee9"} Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.006277 4935 scope.go:117] "RemoveContainer" containerID="0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.006351 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.012790 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerStarted","Data":"b3f4a7d195efa22fd2222d4dfba11cc9e809cc7c0332c47c5b79f82191ceb033"} Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.057053 4935 scope.go:117] "RemoveContainer" containerID="1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.102921 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.109172 4935 scope.go:117] "RemoveContainer" containerID="0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56" Dec 01 18:57:16 crc kubenswrapper[4935]: E1201 18:57:16.110241 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56\": container with ID starting with 0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56 not found: ID does not exist" containerID="0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.110276 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56"} err="failed to get container status \"0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56\": rpc error: code = NotFound desc = could not find container \"0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56\": container with ID starting with 0a9568b48e0dc79df5b7fb49b367ddf286d65968c71304c6dd265ee64ea3ca56 not found: ID does not exist" Dec 01 18:57:16 crc 
kubenswrapper[4935]: I1201 18:57:16.110383 4935 scope.go:117] "RemoveContainer" containerID="1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4" Dec 01 18:57:16 crc kubenswrapper[4935]: E1201 18:57:16.112268 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4\": container with ID starting with 1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4 not found: ID does not exist" containerID="1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.112293 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4"} err="failed to get container status \"1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4\": rpc error: code = NotFound desc = could not find container \"1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4\": container with ID starting with 1469ca41b35f2070b3cee06504923e11ac14f0b604552f74f5186cde780a47a4 not found: ID does not exist" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.119742 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.133538 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:16 crc kubenswrapper[4935]: E1201 18:57:16.134063 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-api" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.134082 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-api" Dec 01 18:57:16 crc kubenswrapper[4935]: E1201 18:57:16.134100 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-log" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.134107 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-log" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.134348 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-log" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.134384 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" containerName="nova-api-api" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.139625 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.143248 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.143593 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.143620 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.146894 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.242078 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.242167 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.242271 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-public-tls-certs\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.242327 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-config-data\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.242398 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dc76632-c294-4467-8877-c69252ba3c8e-logs\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.242460 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j57d8\" (UniqueName: \"kubernetes.io/projected/7dc76632-c294-4467-8877-c69252ba3c8e-kube-api-access-j57d8\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.344765 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-public-tls-certs\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.344858 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-config-data\") pod \"nova-api-0\" (UID: 
\"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.344943 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dc76632-c294-4467-8877-c69252ba3c8e-logs\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.345013 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j57d8\" (UniqueName: \"kubernetes.io/projected/7dc76632-c294-4467-8877-c69252ba3c8e-kube-api-access-j57d8\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.345108 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.345143 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.346126 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dc76632-c294-4467-8877-c69252ba3c8e-logs\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.353631 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-public-tls-certs\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.353763 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-config-data\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.356636 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.358677 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.375205 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j57d8\" (UniqueName: \"kubernetes.io/projected/7dc76632-c294-4467-8877-c69252ba3c8e-kube-api-access-j57d8\") pod \"nova-api-0\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " pod="openstack/nova-api-0" 
Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.461746 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.541679 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14ec6049-21a8-4428-b396-3b6604a69b76" path="/var/lib/kubelet/pods/14ec6049-21a8-4428-b396-3b6604a69b76/volumes" Dec 01 18:57:16 crc kubenswrapper[4935]: I1201 18:57:16.543364 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d343d91-86b0-48cc-b85d-fa7732e5ef47" path="/var/lib/kubelet/pods/5d343d91-86b0-48cc-b85d-fa7732e5ef47/volumes" Dec 01 18:57:17 crc kubenswrapper[4935]: I1201 18:57:17.025251 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:17 crc kubenswrapper[4935]: I1201 18:57:17.031343 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerStarted","Data":"efe3173a5574d2a83c6a8fa1d574b2b291389255d75c6bcc74237ab50a019a67"} Dec 01 18:57:17 crc kubenswrapper[4935]: I1201 18:57:17.249170 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:17 crc kubenswrapper[4935]: I1201 18:57:17.271649 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.044869 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerStarted","Data":"e2c86ee668bf34756612494549d233785400d4f2d44120edf6e90badbf6bd211"} Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.047830 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7dc76632-c294-4467-8877-c69252ba3c8e","Type":"ContainerStarted","Data":"c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee"} Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.047895 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7dc76632-c294-4467-8877-c69252ba3c8e","Type":"ContainerStarted","Data":"a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9"} Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.047918 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7dc76632-c294-4467-8877-c69252ba3c8e","Type":"ContainerStarted","Data":"c9eb06619589a5b63a9ea748c8441aca7cb017e17b1bf881143e92535b6e0599"} Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.078079 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.078058247 podStartE2EDuration="2.078058247s" podCreationTimestamp="2025-12-01 18:57:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:57:18.06992528 +0000 UTC m=+1652.091554589" watchObservedRunningTime="2025-12-01 18:57:18.078058247 +0000 UTC m=+1652.099687506" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.089413 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.247394 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-z4zxp"] Dec 01 
18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.248940 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.253825 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.254270 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.268707 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-z4zxp"] Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.333042 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-config-data\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.333159 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.333344 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-scripts\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.333434 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2tt7\" (UniqueName: \"kubernetes.io/projected/a6d04009-17df-4097-bc8b-4bea0e885074-kube-api-access-h2tt7\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.436133 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.436680 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-scripts\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.437333 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2tt7\" (UniqueName: \"kubernetes.io/projected/a6d04009-17df-4097-bc8b-4bea0e885074-kube-api-access-h2tt7\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 
18:57:18.437528 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-config-data\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.442799 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-config-data\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.442979 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-scripts\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.443618 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.452414 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2tt7\" (UniqueName: \"kubernetes.io/projected/a6d04009-17df-4097-bc8b-4bea0e885074-kube-api-access-h2tt7\") pod \"nova-cell1-cell-mapping-z4zxp\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:18 crc kubenswrapper[4935]: I1201 18:57:18.580742 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:19 crc kubenswrapper[4935]: I1201 18:57:19.061601 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerStarted","Data":"a138dadec0e8590d728166b9f6e4dd19024f575a8c29a4316d4f3f0e93c36cdf"} Dec 01 18:57:19 crc kubenswrapper[4935]: I1201 18:57:19.064347 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-z4zxp"] Dec 01 18:57:19 crc kubenswrapper[4935]: I1201 18:57:19.439367 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:57:19 crc kubenswrapper[4935]: I1201 18:57:19.508774 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:57:19 crc kubenswrapper[4935]: E1201 18:57:19.509392 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:57:19 crc kubenswrapper[4935]: I1201 18:57:19.511819 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-ccnk9"] Dec 01 18:57:19 crc kubenswrapper[4935]: I1201 18:57:19.512074 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" podUID="76a99b6f-1afc-4b35-a462-809d0f160048" containerName="dnsmasq-dns" containerID="cri-o://0c76380debe78cfa5abaa1ddf4ca11c8fd89de56056b3d73bdf052acdf244d3e" gracePeriod=10 Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.072527 4935 generic.go:334] "Generic (PLEG): container finished" podID="76a99b6f-1afc-4b35-a462-809d0f160048" containerID="0c76380debe78cfa5abaa1ddf4ca11c8fd89de56056b3d73bdf052acdf244d3e" exitCode=0 Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.072593 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" event={"ID":"76a99b6f-1afc-4b35-a462-809d0f160048","Type":"ContainerDied","Data":"0c76380debe78cfa5abaa1ddf4ca11c8fd89de56056b3d73bdf052acdf244d3e"} Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.074765 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-z4zxp" event={"ID":"a6d04009-17df-4097-bc8b-4bea0e885074","Type":"ContainerStarted","Data":"84c6a92829b5dbc587e82d8c31276c2867df34c21bdb828f4d5ec3eec8fa15aa"} Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.074789 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-z4zxp" event={"ID":"a6d04009-17df-4097-bc8b-4bea0e885074","Type":"ContainerStarted","Data":"3363b38829fadd314ccec4f065c2ea8344ed82b874d73c538896e43da3b886ed"} Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.113897 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-z4zxp" podStartSLOduration=2.113877958 podStartE2EDuration="2.113877958s" podCreationTimestamp="2025-12-01 18:57:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 
18:57:20.102561791 +0000 UTC m=+1654.124191050" watchObservedRunningTime="2025-12-01 18:57:20.113877958 +0000 UTC m=+1654.135507217" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.341289 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.492928 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-swift-storage-0\") pod \"76a99b6f-1afc-4b35-a462-809d0f160048\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.493390 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-sb\") pod \"76a99b6f-1afc-4b35-a462-809d0f160048\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.493434 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-svc\") pod \"76a99b6f-1afc-4b35-a462-809d0f160048\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.493544 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfwd2\" (UniqueName: \"kubernetes.io/projected/76a99b6f-1afc-4b35-a462-809d0f160048-kube-api-access-bfwd2\") pod \"76a99b6f-1afc-4b35-a462-809d0f160048\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.493582 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-nb\") pod \"76a99b6f-1afc-4b35-a462-809d0f160048\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.493624 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-config\") pod \"76a99b6f-1afc-4b35-a462-809d0f160048\" (UID: \"76a99b6f-1afc-4b35-a462-809d0f160048\") " Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.504121 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76a99b6f-1afc-4b35-a462-809d0f160048-kube-api-access-bfwd2" (OuterVolumeSpecName: "kube-api-access-bfwd2") pod "76a99b6f-1afc-4b35-a462-809d0f160048" (UID: "76a99b6f-1afc-4b35-a462-809d0f160048"). InnerVolumeSpecName "kube-api-access-bfwd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.597958 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfwd2\" (UniqueName: \"kubernetes.io/projected/76a99b6f-1afc-4b35-a462-809d0f160048-kube-api-access-bfwd2\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.614737 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-config" (OuterVolumeSpecName: "config") pod "76a99b6f-1afc-4b35-a462-809d0f160048" (UID: "76a99b6f-1afc-4b35-a462-809d0f160048"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.626546 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76a99b6f-1afc-4b35-a462-809d0f160048" (UID: "76a99b6f-1afc-4b35-a462-809d0f160048"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.629236 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76a99b6f-1afc-4b35-a462-809d0f160048" (UID: "76a99b6f-1afc-4b35-a462-809d0f160048"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.630824 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "76a99b6f-1afc-4b35-a462-809d0f160048" (UID: "76a99b6f-1afc-4b35-a462-809d0f160048"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.702618 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.702650 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.702659 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.702667 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.720783 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76a99b6f-1afc-4b35-a462-809d0f160048" (UID: "76a99b6f-1afc-4b35-a462-809d0f160048"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:57:20 crc kubenswrapper[4935]: I1201 18:57:20.805198 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76a99b6f-1afc-4b35-a462-809d0f160048-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:21 crc kubenswrapper[4935]: I1201 18:57:21.085401 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" event={"ID":"76a99b6f-1afc-4b35-a462-809d0f160048","Type":"ContainerDied","Data":"a24ba9af04f67956374c8a5c962b446d2dcbb53d8844dc0985da68b9cd2de619"} Dec 01 18:57:21 crc kubenswrapper[4935]: I1201 18:57:21.085451 4935 scope.go:117] "RemoveContainer" containerID="0c76380debe78cfa5abaa1ddf4ca11c8fd89de56056b3d73bdf052acdf244d3e" Dec 01 18:57:21 crc kubenswrapper[4935]: I1201 18:57:21.085602 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568d7fd7cf-ccnk9" Dec 01 18:57:21 crc kubenswrapper[4935]: I1201 18:57:21.094483 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerStarted","Data":"41edcf9677c229a49d6fb8ea1319b7a64c47f6a81ce11a865446dd28ae2f849d"} Dec 01 18:57:21 crc kubenswrapper[4935]: I1201 18:57:21.094522 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:57:21 crc kubenswrapper[4935]: I1201 18:57:21.119857 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.903917679 podStartE2EDuration="6.119837466s" podCreationTimestamp="2025-12-01 18:57:15 +0000 UTC" firstStartedPulling="2025-12-01 18:57:15.924014361 +0000 UTC m=+1649.945643620" lastFinishedPulling="2025-12-01 18:57:20.139934148 +0000 UTC m=+1654.161563407" observedRunningTime="2025-12-01 18:57:21.115861721 +0000 UTC m=+1655.137490980" watchObservedRunningTime="2025-12-01 18:57:21.119837466 +0000 UTC m=+1655.141466725" Dec 01 18:57:21 crc kubenswrapper[4935]: I1201 18:57:21.125762 4935 scope.go:117] "RemoveContainer" containerID="6aecf518c7dd19763d459504f1b74467c4699e556558f4370ab788026247d664" Dec 01 18:57:21 crc kubenswrapper[4935]: I1201 18:57:21.159555 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-ccnk9"] Dec 01 18:57:21 crc kubenswrapper[4935]: I1201 18:57:21.177002 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-568d7fd7cf-ccnk9"] Dec 01 18:57:22 crc kubenswrapper[4935]: I1201 18:57:22.523936 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76a99b6f-1afc-4b35-a462-809d0f160048" path="/var/lib/kubelet/pods/76a99b6f-1afc-4b35-a462-809d0f160048/volumes" Dec 01 18:57:25 crc kubenswrapper[4935]: I1201 18:57:25.177171 4935 generic.go:334] "Generic (PLEG): container finished" podID="a6d04009-17df-4097-bc8b-4bea0e885074" containerID="84c6a92829b5dbc587e82d8c31276c2867df34c21bdb828f4d5ec3eec8fa15aa" exitCode=0 Dec 01 18:57:25 crc kubenswrapper[4935]: I1201 18:57:25.177290 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-z4zxp" event={"ID":"a6d04009-17df-4097-bc8b-4bea0e885074","Type":"ContainerDied","Data":"84c6a92829b5dbc587e82d8c31276c2867df34c21bdb828f4d5ec3eec8fa15aa"} Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.462725 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-api-0" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.465237 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.698611 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.789023 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2tt7\" (UniqueName: \"kubernetes.io/projected/a6d04009-17df-4097-bc8b-4bea0e885074-kube-api-access-h2tt7\") pod \"a6d04009-17df-4097-bc8b-4bea0e885074\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.789128 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-scripts\") pod \"a6d04009-17df-4097-bc8b-4bea0e885074\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.789177 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-combined-ca-bundle\") pod \"a6d04009-17df-4097-bc8b-4bea0e885074\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.789448 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-config-data\") pod \"a6d04009-17df-4097-bc8b-4bea0e885074\" (UID: \"a6d04009-17df-4097-bc8b-4bea0e885074\") " Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.800828 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-scripts" (OuterVolumeSpecName: "scripts") pod "a6d04009-17df-4097-bc8b-4bea0e885074" (UID: "a6d04009-17df-4097-bc8b-4bea0e885074"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.812452 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6d04009-17df-4097-bc8b-4bea0e885074-kube-api-access-h2tt7" (OuterVolumeSpecName: "kube-api-access-h2tt7") pod "a6d04009-17df-4097-bc8b-4bea0e885074" (UID: "a6d04009-17df-4097-bc8b-4bea0e885074"). InnerVolumeSpecName "kube-api-access-h2tt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.891834 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2tt7\" (UniqueName: \"kubernetes.io/projected/a6d04009-17df-4097-bc8b-4bea0e885074-kube-api-access-h2tt7\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.891872 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.899294 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-config-data" (OuterVolumeSpecName: "config-data") pod "a6d04009-17df-4097-bc8b-4bea0e885074" (UID: "a6d04009-17df-4097-bc8b-4bea0e885074"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.913273 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6d04009-17df-4097-bc8b-4bea0e885074" (UID: "a6d04009-17df-4097-bc8b-4bea0e885074"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.994044 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:26 crc kubenswrapper[4935]: I1201 18:57:26.994094 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d04009-17df-4097-bc8b-4bea0e885074-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.200219 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-z4zxp" Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.200222 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-z4zxp" event={"ID":"a6d04009-17df-4097-bc8b-4bea0e885074","Type":"ContainerDied","Data":"3363b38829fadd314ccec4f065c2ea8344ed82b874d73c538896e43da3b886ed"} Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.200307 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3363b38829fadd314ccec4f065c2ea8344ed82b874d73c538896e43da3b886ed" Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.469657 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.495436 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.252:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.495431 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.252:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.504511 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.504731 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d38f760d-4c52-437b-b407-778866305310" containerName="nova-scheduler-scheduler" containerID="cri-o://7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed" gracePeriod=30 Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.520124 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.520383 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" 
containerName="nova-metadata-log" containerID="cri-o://2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04" gracePeriod=30 Dec 01 18:57:27 crc kubenswrapper[4935]: I1201 18:57:27.520513 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerName="nova-metadata-metadata" containerID="cri-o://2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312" gracePeriod=30 Dec 01 18:57:28 crc kubenswrapper[4935]: I1201 18:57:28.219169 4935 generic.go:334] "Generic (PLEG): container finished" podID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerID="2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04" exitCode=143 Dec 01 18:57:28 crc kubenswrapper[4935]: I1201 18:57:28.219339 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d84da5cc-8329-4e71-bd5f-0a3db9819952","Type":"ContainerDied","Data":"2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04"} Dec 01 18:57:28 crc kubenswrapper[4935]: I1201 18:57:28.219390 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-log" containerID="cri-o://a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9" gracePeriod=30 Dec 01 18:57:28 crc kubenswrapper[4935]: I1201 18:57:28.219465 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-api" containerID="cri-o://c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee" gracePeriod=30 Dec 01 18:57:29 crc kubenswrapper[4935]: I1201 18:57:29.246051 4935 generic.go:334] "Generic (PLEG): container finished" podID="7dc76632-c294-4467-8877-c69252ba3c8e" containerID="a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9" exitCode=143 Dec 01 18:57:29 crc kubenswrapper[4935]: I1201 18:57:29.246176 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7dc76632-c294-4467-8877-c69252ba3c8e","Type":"ContainerDied","Data":"a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9"} Dec 01 18:57:30 crc kubenswrapper[4935]: I1201 18:57:30.508911 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:57:30 crc kubenswrapper[4935]: E1201 18:57:30.509618 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.194402 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.287160 4935 generic.go:334] "Generic (PLEG): container finished" podID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerID="2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312" exitCode=0 Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.287270 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.287307 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d84da5cc-8329-4e71-bd5f-0a3db9819952","Type":"ContainerDied","Data":"2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312"} Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.288447 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d84da5cc-8329-4e71-bd5f-0a3db9819952","Type":"ContainerDied","Data":"a8f1d017350f4b07b64ed8a65e8f089e84100afa10f2e83205be6f18f3790ca1"} Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.288470 4935 scope.go:117] "RemoveContainer" containerID="2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.296188 4935 generic.go:334] "Generic (PLEG): container finished" podID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerID="9346e8bfbc1d2b891d3134fba4cea1b121e0a29b099d098b7ca194e78e56a1ca" exitCode=137 Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.296222 4935 generic.go:334] "Generic (PLEG): container finished" podID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerID="415fbfc5d1f5f339b420457e74ae6b4efa8aa1cc05473e935e0db9969571d772" exitCode=137 Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.296243 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerDied","Data":"9346e8bfbc1d2b891d3134fba4cea1b121e0a29b099d098b7ca194e78e56a1ca"} Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.296272 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerDied","Data":"415fbfc5d1f5f339b420457e74ae6b4efa8aa1cc05473e935e0db9969571d772"} Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.300113 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-combined-ca-bundle\") pod \"d84da5cc-8329-4e71-bd5f-0a3db9819952\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.300217 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddvxs\" (UniqueName: \"kubernetes.io/projected/d84da5cc-8329-4e71-bd5f-0a3db9819952-kube-api-access-ddvxs\") pod \"d84da5cc-8329-4e71-bd5f-0a3db9819952\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.300290 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d84da5cc-8329-4e71-bd5f-0a3db9819952-logs\") pod \"d84da5cc-8329-4e71-bd5f-0a3db9819952\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.300340 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-config-data\") pod \"d84da5cc-8329-4e71-bd5f-0a3db9819952\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.300564 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-nova-metadata-tls-certs\") pod \"d84da5cc-8329-4e71-bd5f-0a3db9819952\" (UID: \"d84da5cc-8329-4e71-bd5f-0a3db9819952\") " Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.301065 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d84da5cc-8329-4e71-bd5f-0a3db9819952-logs" (OuterVolumeSpecName: "logs") pod "d84da5cc-8329-4e71-bd5f-0a3db9819952" (UID: "d84da5cc-8329-4e71-bd5f-0a3db9819952"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.301896 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d84da5cc-8329-4e71-bd5f-0a3db9819952-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.305714 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d84da5cc-8329-4e71-bd5f-0a3db9819952-kube-api-access-ddvxs" (OuterVolumeSpecName: "kube-api-access-ddvxs") pod "d84da5cc-8329-4e71-bd5f-0a3db9819952" (UID: "d84da5cc-8329-4e71-bd5f-0a3db9819952"). InnerVolumeSpecName "kube-api-access-ddvxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.338878 4935 scope.go:117] "RemoveContainer" containerID="2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.355079 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d84da5cc-8329-4e71-bd5f-0a3db9819952" (UID: "d84da5cc-8329-4e71-bd5f-0a3db9819952"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.366815 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-config-data" (OuterVolumeSpecName: "config-data") pod "d84da5cc-8329-4e71-bd5f-0a3db9819952" (UID: "d84da5cc-8329-4e71-bd5f-0a3db9819952"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.370142 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "d84da5cc-8329-4e71-bd5f-0a3db9819952" (UID: "d84da5cc-8329-4e71-bd5f-0a3db9819952"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.402106 4935 scope.go:117] "RemoveContainer" containerID="2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.403023 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312\": container with ID starting with 2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312 not found: ID does not exist" containerID="2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.403080 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312"} err="failed to get container status \"2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312\": rpc error: code = NotFound desc = could not find container \"2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312\": container with ID starting with 2cd8357229005f141479735a80d42ca52db6f3a8cc4b0ac74c976dc362259312 not found: ID does not exist" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.403108 4935 scope.go:117] "RemoveContainer" containerID="2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.403490 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04\": container with ID starting with 2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04 not found: ID does not exist" containerID="2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.403508 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04"} err="failed to get container status \"2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04\": rpc error: code = NotFound desc = could not find container \"2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04\": container with ID starting with 2df6e687ea7b34f4a546010b79948dc91eafd8118c9b85859c0c7e3bf0296b04 not found: ID does not exist" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.409092 4935 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.409121 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.409135 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddvxs\" (UniqueName: \"kubernetes.io/projected/d84da5cc-8329-4e71-bd5f-0a3db9819952-kube-api-access-ddvxs\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.409181 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d84da5cc-8329-4e71-bd5f-0a3db9819952-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.641958 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.655258 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.679772 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.709938 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.710694 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6d04009-17df-4097-bc8b-4bea0e885074" containerName="nova-manage" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.710707 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6d04009-17df-4097-bc8b-4bea0e885074" containerName="nova-manage" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.710740 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-api" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.710745 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-api" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.710757 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-listener" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.710763 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-listener" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.710772 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-notifier" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.710778 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-notifier" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.710787 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerName="nova-metadata-metadata" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.710794 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerName="nova-metadata-metadata" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.710810 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76a99b6f-1afc-4b35-a462-809d0f160048" containerName="dnsmasq-dns" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.710817 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="76a99b6f-1afc-4b35-a462-809d0f160048" containerName="dnsmasq-dns" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.710835 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerName="nova-metadata-log" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.710841 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerName="nova-metadata-log" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.710851 4935 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76a99b6f-1afc-4b35-a462-809d0f160048" containerName="init" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.710856 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="76a99b6f-1afc-4b35-a462-809d0f160048" containerName="init" Dec 01 18:57:31 crc kubenswrapper[4935]: E1201 18:57:31.710871 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-evaluator" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.710877 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-evaluator" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.711091 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-evaluator" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.711117 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-api" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.711127 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerName="nova-metadata-log" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.711138 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" containerName="nova-metadata-metadata" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.711170 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-notifier" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.711179 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6d04009-17df-4097-bc8b-4bea0e885074" containerName="nova-manage" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.711191 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="76a99b6f-1afc-4b35-a462-809d0f160048" containerName="dnsmasq-dns" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.711200 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" containerName="aodh-listener" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.712396 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.724912 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.725710 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.730752 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.824862 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-scripts\") pod \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.824898 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-combined-ca-bundle\") pod \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.825008 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-config-data\") pod \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.825057 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7pdw\" (UniqueName: \"kubernetes.io/projected/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-kube-api-access-r7pdw\") pod \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\" (UID: \"f98ab763-d98c-4ad6-bc2e-a943c33b43fc\") " Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.825464 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34d0ec26-0662-497d-9cdc-278de9d991f4-config-data\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.825497 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34d0ec26-0662-497d-9cdc-278de9d991f4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.825538 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gtqx\" (UniqueName: \"kubernetes.io/projected/34d0ec26-0662-497d-9cdc-278de9d991f4-kube-api-access-9gtqx\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.825594 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34d0ec26-0662-497d-9cdc-278de9d991f4-logs\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.825663 4935 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/34d0ec26-0662-497d-9cdc-278de9d991f4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.833300 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-scripts" (OuterVolumeSpecName: "scripts") pod "f98ab763-d98c-4ad6-bc2e-a943c33b43fc" (UID: "f98ab763-d98c-4ad6-bc2e-a943c33b43fc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.851496 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-kube-api-access-r7pdw" (OuterVolumeSpecName: "kube-api-access-r7pdw") pod "f98ab763-d98c-4ad6-bc2e-a943c33b43fc" (UID: "f98ab763-d98c-4ad6-bc2e-a943c33b43fc"). InnerVolumeSpecName "kube-api-access-r7pdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.927120 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gtqx\" (UniqueName: \"kubernetes.io/projected/34d0ec26-0662-497d-9cdc-278de9d991f4-kube-api-access-9gtqx\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.927282 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34d0ec26-0662-497d-9cdc-278de9d991f4-logs\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.927394 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/34d0ec26-0662-497d-9cdc-278de9d991f4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.927499 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34d0ec26-0662-497d-9cdc-278de9d991f4-config-data\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.927536 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34d0ec26-0662-497d-9cdc-278de9d991f4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.928079 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.929350 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7pdw\" (UniqueName: \"kubernetes.io/projected/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-kube-api-access-r7pdw\") on node \"crc\" DevicePath 
\"\"" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.930167 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34d0ec26-0662-497d-9cdc-278de9d991f4-logs\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.934955 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34d0ec26-0662-497d-9cdc-278de9d991f4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.935482 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/34d0ec26-0662-497d-9cdc-278de9d991f4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.935491 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34d0ec26-0662-497d-9cdc-278de9d991f4-config-data\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.954976 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gtqx\" (UniqueName: \"kubernetes.io/projected/34d0ec26-0662-497d-9cdc-278de9d991f4-kube-api-access-9gtqx\") pod \"nova-metadata-0\" (UID: \"34d0ec26-0662-497d-9cdc-278de9d991f4\") " pod="openstack/nova-metadata-0" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.975369 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-config-data" (OuterVolumeSpecName: "config-data") pod "f98ab763-d98c-4ad6-bc2e-a943c33b43fc" (UID: "f98ab763-d98c-4ad6-bc2e-a943c33b43fc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:31 crc kubenswrapper[4935]: I1201 18:57:31.983235 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f98ab763-d98c-4ad6-bc2e-a943c33b43fc" (UID: "f98ab763-d98c-4ad6-bc2e-a943c33b43fc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.032079 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.032110 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98ab763-d98c-4ad6-bc2e-a943c33b43fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.053533 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.124075 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.134021 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-combined-ca-bundle\") pod \"d38f760d-4c52-437b-b407-778866305310\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.134255 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs52l\" (UniqueName: \"kubernetes.io/projected/d38f760d-4c52-437b-b407-778866305310-kube-api-access-hs52l\") pod \"d38f760d-4c52-437b-b407-778866305310\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.134314 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-config-data\") pod \"d38f760d-4c52-437b-b407-778866305310\" (UID: \"d38f760d-4c52-437b-b407-778866305310\") " Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.137616 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d38f760d-4c52-437b-b407-778866305310-kube-api-access-hs52l" (OuterVolumeSpecName: "kube-api-access-hs52l") pod "d38f760d-4c52-437b-b407-778866305310" (UID: "d38f760d-4c52-437b-b407-778866305310"). InnerVolumeSpecName "kube-api-access-hs52l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.164098 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d38f760d-4c52-437b-b407-778866305310" (UID: "d38f760d-4c52-437b-b407-778866305310"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.171419 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-config-data" (OuterVolumeSpecName: "config-data") pod "d38f760d-4c52-437b-b407-778866305310" (UID: "d38f760d-4c52-437b-b407-778866305310"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.236546 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs52l\" (UniqueName: \"kubernetes.io/projected/d38f760d-4c52-437b-b407-778866305310-kube-api-access-hs52l\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.237058 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.237071 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d38f760d-4c52-437b-b407-778866305310-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.314367 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.314428 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d38f760d-4c52-437b-b407-778866305310","Type":"ContainerDied","Data":"7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed"} Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.314493 4935 scope.go:117] "RemoveContainer" containerID="7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.314258 4935 generic.go:334] "Generic (PLEG): container finished" podID="d38f760d-4c52-437b-b407-778866305310" containerID="7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed" exitCode=0 Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.335300 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d38f760d-4c52-437b-b407-778866305310","Type":"ContainerDied","Data":"bd99f0aacf35a5943edb8ef623cada9f3471bf05835190575c3b38b00fd9143c"} Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.345589 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f98ab763-d98c-4ad6-bc2e-a943c33b43fc","Type":"ContainerDied","Data":"758c94a70926137495fc9195079092e3f19c1f7c1d799824cd5a9ec66654f46e"} Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.345670 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.367585 4935 scope.go:117] "RemoveContainer" containerID="7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed" Dec 01 18:57:32 crc kubenswrapper[4935]: E1201 18:57:32.371702 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed\": container with ID starting with 7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed not found: ID does not exist" containerID="7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.371757 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed"} err="failed to get container status \"7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed\": rpc error: code = NotFound desc = could not find container \"7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed\": container with ID starting with 7d9916b38a0b4de328628de1c63f8d753876e9fcdc4b180c880def9e61a553ed not found: ID does not exist" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.371799 4935 scope.go:117] "RemoveContainer" containerID="9346e8bfbc1d2b891d3134fba4cea1b121e0a29b099d098b7ca194e78e56a1ca" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.373695 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.392218 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.406308 4935 scope.go:117] "RemoveContainer" containerID="415fbfc5d1f5f339b420457e74ae6b4efa8aa1cc05473e935e0db9969571d772" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.406359 4935 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:57:32 crc kubenswrapper[4935]: E1201 18:57:32.406962 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d38f760d-4c52-437b-b407-778866305310" containerName="nova-scheduler-scheduler" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.406980 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d38f760d-4c52-437b-b407-778866305310" containerName="nova-scheduler-scheduler" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.408056 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d38f760d-4c52-437b-b407-778866305310" containerName="nova-scheduler-scheduler" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.408961 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.412958 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.421183 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.441073 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.442654 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3f52b8c-640f-40b5-bb76-44c4387d6181-config-data\") pod \"nova-scheduler-0\" (UID: \"f3f52b8c-640f-40b5-bb76-44c4387d6181\") " pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.442710 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3f52b8c-640f-40b5-bb76-44c4387d6181-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f3f52b8c-640f-40b5-bb76-44c4387d6181\") " pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.442742 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8l4b\" (UniqueName: \"kubernetes.io/projected/f3f52b8c-640f-40b5-bb76-44c4387d6181-kube-api-access-m8l4b\") pod \"nova-scheduler-0\" (UID: \"f3f52b8c-640f-40b5-bb76-44c4387d6181\") " pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.443715 4935 scope.go:117] "RemoveContainer" containerID="015bd3e6734d860364fe741192bd980d244211e23c4caca02c351ebad437f6f0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.459745 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.479382 4935 scope.go:117] "RemoveContainer" containerID="4d599b685d5849424ae4b6b625284b667dfe1d7330621fb0468a0340e8587d2c" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.481334 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.486940 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.491333 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.492454 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.492837 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-l8dhf" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.493061 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.496584 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.543813 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3f52b8c-640f-40b5-bb76-44c4387d6181-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f3f52b8c-640f-40b5-bb76-44c4387d6181\") " pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.543889 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8l4b\" (UniqueName: \"kubernetes.io/projected/f3f52b8c-640f-40b5-bb76-44c4387d6181-kube-api-access-m8l4b\") pod \"nova-scheduler-0\" (UID: \"f3f52b8c-640f-40b5-bb76-44c4387d6181\") " pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.543988 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-scripts\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.544065 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-internal-tls-certs\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.544128 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-combined-ca-bundle\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.544201 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-public-tls-certs\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.544238 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8zkn\" (UniqueName: \"kubernetes.io/projected/32e6fc0e-c91a-4314-bc53-d5053316f8cd-kube-api-access-c8zkn\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.544287 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-config-data\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.544319 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3f52b8c-640f-40b5-bb76-44c4387d6181-config-data\") pod \"nova-scheduler-0\" (UID: \"f3f52b8c-640f-40b5-bb76-44c4387d6181\") " pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.546251 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d38f760d-4c52-437b-b407-778866305310" path="/var/lib/kubelet/pods/d38f760d-4c52-437b-b407-778866305310/volumes" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.547002 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d84da5cc-8329-4e71-bd5f-0a3db9819952" path="/var/lib/kubelet/pods/d84da5cc-8329-4e71-bd5f-0a3db9819952/volumes" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.547850 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3f52b8c-640f-40b5-bb76-44c4387d6181-config-data\") pod \"nova-scheduler-0\" (UID: \"f3f52b8c-640f-40b5-bb76-44c4387d6181\") " pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.548514 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3f52b8c-640f-40b5-bb76-44c4387d6181-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f3f52b8c-640f-40b5-bb76-44c4387d6181\") " pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.548592 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f98ab763-d98c-4ad6-bc2e-a943c33b43fc" path="/var/lib/kubelet/pods/f98ab763-d98c-4ad6-bc2e-a943c33b43fc/volumes" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.549799 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.560404 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8l4b\" (UniqueName: \"kubernetes.io/projected/f3f52b8c-640f-40b5-bb76-44c4387d6181-kube-api-access-m8l4b\") pod \"nova-scheduler-0\" (UID: \"f3f52b8c-640f-40b5-bb76-44c4387d6181\") " pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.645395 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-public-tls-certs\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.647639 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8zkn\" (UniqueName: \"kubernetes.io/projected/32e6fc0e-c91a-4314-bc53-d5053316f8cd-kube-api-access-c8zkn\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.648005 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-config-data\") pod 
\"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.648133 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-scripts\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.648554 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-internal-tls-certs\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.650107 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-combined-ca-bundle\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.648605 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.651772 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-public-tls-certs\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.651917 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-config-data\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: W1201 18:57:32.652397 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34d0ec26_0662_497d_9cdc_278de9d991f4.slice/crio-aa85d6808ce3189eb7db7b8be246b6756e35919389d9c772fc00ad07fd8b7ae9 WatchSource:0}: Error finding container aa85d6808ce3189eb7db7b8be246b6756e35919389d9c772fc00ad07fd8b7ae9: Status 404 returned error can't find the container with id aa85d6808ce3189eb7db7b8be246b6756e35919389d9c772fc00ad07fd8b7ae9 Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.652879 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-internal-tls-certs\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.654328 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-scripts\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.654962 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-combined-ca-bundle\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.676827 4935 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8zkn\" (UniqueName: \"kubernetes.io/projected/32e6fc0e-c91a-4314-bc53-d5053316f8cd-kube-api-access-c8zkn\") pod \"aodh-0\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " pod="openstack/aodh-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.736200 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 18:57:32 crc kubenswrapper[4935]: I1201 18:57:32.830301 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.344928 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.366100 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"34d0ec26-0662-497d-9cdc-278de9d991f4","Type":"ContainerStarted","Data":"96f6e0e916e4e74166e6441d014d35799768276a39e5907fe6722d6c0ac8a4ff"} Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.366144 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"34d0ec26-0662-497d-9cdc-278de9d991f4","Type":"ContainerStarted","Data":"aa85d6808ce3189eb7db7b8be246b6756e35919389d9c772fc00ad07fd8b7ae9"} Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.374238 4935 generic.go:334] "Generic (PLEG): container finished" podID="7dc76632-c294-4467-8877-c69252ba3c8e" containerID="c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee" exitCode=0 Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.374324 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7dc76632-c294-4467-8877-c69252ba3c8e","Type":"ContainerDied","Data":"c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee"} Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.374350 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7dc76632-c294-4467-8877-c69252ba3c8e","Type":"ContainerDied","Data":"c9eb06619589a5b63a9ea748c8441aca7cb017e17b1bf881143e92535b6e0599"} Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.374323 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.374367 4935 scope.go:117] "RemoveContainer" containerID="c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.406701 4935 scope.go:117] "RemoveContainer" containerID="a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.436873 4935 scope.go:117] "RemoveContainer" containerID="c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee" Dec 01 18:57:33 crc kubenswrapper[4935]: E1201 18:57:33.437645 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee\": container with ID starting with c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee not found: ID does not exist" containerID="c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.437677 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee"} err="failed to get container status \"c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee\": rpc error: code = NotFound desc = could not find container \"c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee\": container with ID starting with c62f09ca557d32bf1f35cb5a86cdfb6ebaf8f3b31dd41f749e4554f627329fee not found: ID does not exist" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.437699 4935 scope.go:117] "RemoveContainer" containerID="a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9" Dec 01 18:57:33 crc kubenswrapper[4935]: E1201 18:57:33.439731 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9\": container with ID starting with a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9 not found: ID does not exist" containerID="a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.439767 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9"} err="failed to get container status \"a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9\": rpc error: code = NotFound desc = could not find container \"a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9\": container with ID starting with a9ef25255373ecd4ec2567a3f7fc6cc605f78c9d9284782103f5561375e7c6d9 not found: ID does not exist" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.474074 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-internal-tls-certs\") pod \"7dc76632-c294-4467-8877-c69252ba3c8e\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.474468 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j57d8\" (UniqueName: \"kubernetes.io/projected/7dc76632-c294-4467-8877-c69252ba3c8e-kube-api-access-j57d8\") pod 
\"7dc76632-c294-4467-8877-c69252ba3c8e\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.474791 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-combined-ca-bundle\") pod \"7dc76632-c294-4467-8877-c69252ba3c8e\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.475086 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-config-data\") pod \"7dc76632-c294-4467-8877-c69252ba3c8e\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.475212 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-public-tls-certs\") pod \"7dc76632-c294-4467-8877-c69252ba3c8e\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.475342 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dc76632-c294-4467-8877-c69252ba3c8e-logs\") pod \"7dc76632-c294-4467-8877-c69252ba3c8e\" (UID: \"7dc76632-c294-4467-8877-c69252ba3c8e\") " Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.479218 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dc76632-c294-4467-8877-c69252ba3c8e-kube-api-access-j57d8" (OuterVolumeSpecName: "kube-api-access-j57d8") pod "7dc76632-c294-4467-8877-c69252ba3c8e" (UID: "7dc76632-c294-4467-8877-c69252ba3c8e"). InnerVolumeSpecName "kube-api-access-j57d8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.485095 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j57d8\" (UniqueName: \"kubernetes.io/projected/7dc76632-c294-4467-8877-c69252ba3c8e-kube-api-access-j57d8\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.486457 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dc76632-c294-4467-8877-c69252ba3c8e-logs" (OuterVolumeSpecName: "logs") pod "7dc76632-c294-4467-8877-c69252ba3c8e" (UID: "7dc76632-c294-4467-8877-c69252ba3c8e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.510303 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-config-data" (OuterVolumeSpecName: "config-data") pod "7dc76632-c294-4467-8877-c69252ba3c8e" (UID: "7dc76632-c294-4467-8877-c69252ba3c8e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.522820 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7dc76632-c294-4467-8877-c69252ba3c8e" (UID: "7dc76632-c294-4467-8877-c69252ba3c8e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.537335 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7dc76632-c294-4467-8877-c69252ba3c8e" (UID: "7dc76632-c294-4467-8877-c69252ba3c8e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.562991 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7dc76632-c294-4467-8877-c69252ba3c8e" (UID: "7dc76632-c294-4467-8877-c69252ba3c8e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.586625 4935 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dc76632-c294-4467-8877-c69252ba3c8e-logs\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.586762 4935 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.586826 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.586878 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.586928 4935 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc76632-c294-4467-8877-c69252ba3c8e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.669313 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.682252 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.712950 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.726374 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.746258 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:33 crc kubenswrapper[4935]: E1201 18:57:33.747218 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-api" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.747324 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-api" Dec 01 18:57:33 crc kubenswrapper[4935]: E1201 18:57:33.747447 4935 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-log" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.747548 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-log" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.747837 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-log" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.747953 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dc76632-c294-4467-8877-c69252ba3c8e" containerName="nova-api-api" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.749300 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.755688 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.756032 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.756298 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.793642 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp4hr\" (UniqueName: \"kubernetes.io/projected/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-kube-api-access-vp4hr\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.793717 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.793762 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-public-tls-certs\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.793811 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.793878 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-config-data\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.794023 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-logs\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" 
Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.802404 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.895222 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-config-data\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.895604 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-logs\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.896067 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-logs\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.899358 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-config-data\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.899435 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp4hr\" (UniqueName: \"kubernetes.io/projected/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-kube-api-access-vp4hr\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.900253 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.900403 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-public-tls-certs\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.900645 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.904458 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.905358 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-public-tls-certs\") pod \"nova-api-0\" (UID: 
\"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.909289 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:33 crc kubenswrapper[4935]: I1201 18:57:33.921531 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp4hr\" (UniqueName: \"kubernetes.io/projected/e84dfe6a-b9aa-406a-9b1a-895d826cd5a7-kube-api-access-vp4hr\") pod \"nova-api-0\" (UID: \"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7\") " pod="openstack/nova-api-0" Dec 01 18:57:34 crc kubenswrapper[4935]: I1201 18:57:34.001973 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 18:57:34 crc kubenswrapper[4935]: I1201 18:57:34.386982 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"34d0ec26-0662-497d-9cdc-278de9d991f4","Type":"ContainerStarted","Data":"3500d215a0be3dae96f3cc83903a0360ae84f14f44327c44c2eb4f54289925a8"} Dec 01 18:57:34 crc kubenswrapper[4935]: I1201 18:57:34.391822 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerStarted","Data":"6c1619208c604c0a35c8726749db796aba8dda87673f26257f17271748dba156"} Dec 01 18:57:34 crc kubenswrapper[4935]: I1201 18:57:34.394070 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f3f52b8c-640f-40b5-bb76-44c4387d6181","Type":"ContainerStarted","Data":"4a380162952c6a31ba12564f2ec9598a7e58f63562cc0efcfb5c8bde64549d5b"} Dec 01 18:57:34 crc kubenswrapper[4935]: I1201 18:57:34.394112 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f3f52b8c-640f-40b5-bb76-44c4387d6181","Type":"ContainerStarted","Data":"377a31c26b8ea3fd18420dd51c9fd810177750472976f5c1499a6583255d8e60"} Dec 01 18:57:34 crc kubenswrapper[4935]: I1201 18:57:34.426951 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.426925076 podStartE2EDuration="3.426925076s" podCreationTimestamp="2025-12-01 18:57:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:57:34.408537677 +0000 UTC m=+1668.430166936" watchObservedRunningTime="2025-12-01 18:57:34.426925076 +0000 UTC m=+1668.448554345" Dec 01 18:57:34 crc kubenswrapper[4935]: I1201 18:57:34.443099 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.443079045 podStartE2EDuration="2.443079045s" podCreationTimestamp="2025-12-01 18:57:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:57:34.432737389 +0000 UTC m=+1668.454366668" watchObservedRunningTime="2025-12-01 18:57:34.443079045 +0000 UTC m=+1668.464708304" Dec 01 18:57:34 crc kubenswrapper[4935]: I1201 18:57:34.501056 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 18:57:34 crc kubenswrapper[4935]: I1201 18:57:34.524909 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7dc76632-c294-4467-8877-c69252ba3c8e" path="/var/lib/kubelet/pods/7dc76632-c294-4467-8877-c69252ba3c8e/volumes" Dec 01 18:57:35 crc kubenswrapper[4935]: I1201 18:57:35.415224 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerStarted","Data":"931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f"} Dec 01 18:57:35 crc kubenswrapper[4935]: I1201 18:57:35.416167 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerStarted","Data":"2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5"} Dec 01 18:57:35 crc kubenswrapper[4935]: I1201 18:57:35.418643 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7","Type":"ContainerStarted","Data":"522886f5250c58fcdc63b42a3026fd35a01e57f50ee9a56b69c74c78a41e190c"} Dec 01 18:57:35 crc kubenswrapper[4935]: I1201 18:57:35.418686 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7","Type":"ContainerStarted","Data":"66fe280c2a5f0781d87afe7587811e27e18e5ec2190d61345a05570c2d002159"} Dec 01 18:57:35 crc kubenswrapper[4935]: I1201 18:57:35.418698 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e84dfe6a-b9aa-406a-9b1a-895d826cd5a7","Type":"ContainerStarted","Data":"5221e468f7173a3b46725142e0bd1fd9ea74a4712be656dec8ce7ba65cc33dcf"} Dec 01 18:57:35 crc kubenswrapper[4935]: I1201 18:57:35.441434 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.441414194 podStartE2EDuration="2.441414194s" podCreationTimestamp="2025-12-01 18:57:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:57:35.440304499 +0000 UTC m=+1669.461933798" watchObservedRunningTime="2025-12-01 18:57:35.441414194 +0000 UTC m=+1669.463043463" Dec 01 18:57:36 crc kubenswrapper[4935]: I1201 18:57:36.454277 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerStarted","Data":"7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed"} Dec 01 18:57:37 crc kubenswrapper[4935]: I1201 18:57:37.125124 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 18:57:37 crc kubenswrapper[4935]: I1201 18:57:37.125432 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 18:57:37 crc kubenswrapper[4935]: I1201 18:57:37.475755 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerStarted","Data":"4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b"} Dec 01 18:57:37 crc kubenswrapper[4935]: I1201 18:57:37.507856 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.761700632 podStartE2EDuration="5.507841019s" podCreationTimestamp="2025-12-01 18:57:32 +0000 UTC" firstStartedPulling="2025-12-01 18:57:33.6889609 +0000 UTC m=+1667.710590159" lastFinishedPulling="2025-12-01 18:57:36.435101277 +0000 UTC m=+1670.456730546" observedRunningTime="2025-12-01 18:57:37.499165106 
+0000 UTC m=+1671.520794365" watchObservedRunningTime="2025-12-01 18:57:37.507841019 +0000 UTC m=+1671.529470278" Dec 01 18:57:37 crc kubenswrapper[4935]: I1201 18:57:37.739233 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 18:57:41 crc kubenswrapper[4935]: I1201 18:57:41.508791 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:57:41 crc kubenswrapper[4935]: E1201 18:57:41.509727 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:57:42 crc kubenswrapper[4935]: I1201 18:57:42.125129 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 18:57:42 crc kubenswrapper[4935]: I1201 18:57:42.125198 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 18:57:42 crc kubenswrapper[4935]: I1201 18:57:42.736610 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 18:57:42 crc kubenswrapper[4935]: I1201 18:57:42.775992 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 18:57:43 crc kubenswrapper[4935]: I1201 18:57:43.140348 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="34d0ec26-0662-497d-9cdc-278de9d991f4" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.254:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 18:57:43 crc kubenswrapper[4935]: I1201 18:57:43.140357 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="34d0ec26-0662-497d-9cdc-278de9d991f4" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.254:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 18:57:44 crc kubenswrapper[4935]: I1201 18:57:44.002682 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 18:57:44 crc kubenswrapper[4935]: I1201 18:57:44.004467 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 18:57:44 crc kubenswrapper[4935]: I1201 18:57:44.070546 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 18:57:45 crc kubenswrapper[4935]: I1201 18:57:45.025361 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e84dfe6a-b9aa-406a-9b1a-895d826cd5a7" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.1:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 18:57:45 crc kubenswrapper[4935]: I1201 18:57:45.025362 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e84dfe6a-b9aa-406a-9b1a-895d826cd5a7" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.1:8774/\": net/http: request 
canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 18:57:45 crc kubenswrapper[4935]: I1201 18:57:45.409705 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.037048 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.037815 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd" containerName="kube-state-metrics" containerID="cri-o://f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac" gracePeriod=30 Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.184369 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-0"] Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.184910 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mysqld-exporter-0" podUID="4c367332-667b-47f3-b2e6-1a4c759ca7e8" containerName="mysqld-exporter" containerID="cri-o://10dc53b9705f3edc7df6dc900b5169b4c47cca40d7d7aa441b4e6ed62a5a2140" gracePeriod=30 Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.627280 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.726410 4935 generic.go:334] "Generic (PLEG): container finished" podID="c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd" containerID="f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac" exitCode=2 Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.726701 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd","Type":"ContainerDied","Data":"f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac"} Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.726778 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd","Type":"ContainerDied","Data":"250a592c0089d04b10a9538db998ca84336ce19007ca8a6f2b3e1cdf3ce3f90d"} Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.726854 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.726862 4935 scope.go:117] "RemoveContainer" containerID="f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.736937 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2jg2\" (UniqueName: \"kubernetes.io/projected/c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd-kube-api-access-v2jg2\") pod \"c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd\" (UID: \"c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd\") " Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.744595 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd-kube-api-access-v2jg2" (OuterVolumeSpecName: "kube-api-access-v2jg2") pod "c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd" (UID: "c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd"). InnerVolumeSpecName "kube-api-access-v2jg2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.745347 4935 generic.go:334] "Generic (PLEG): container finished" podID="4c367332-667b-47f3-b2e6-1a4c759ca7e8" containerID="10dc53b9705f3edc7df6dc900b5169b4c47cca40d7d7aa441b4e6ed62a5a2140" exitCode=2 Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.745380 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"4c367332-667b-47f3-b2e6-1a4c759ca7e8","Type":"ContainerDied","Data":"10dc53b9705f3edc7df6dc900b5169b4c47cca40d7d7aa441b4e6ed62a5a2140"} Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.787339 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-0" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.832781 4935 scope.go:117] "RemoveContainer" containerID="f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac" Dec 01 18:57:50 crc kubenswrapper[4935]: E1201 18:57:50.836209 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac\": container with ID starting with f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac not found: ID does not exist" containerID="f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.836250 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac"} err="failed to get container status \"f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac\": rpc error: code = NotFound desc = could not find container \"f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac\": container with ID starting with f7c92429b425fad38535c3249acef8b011f34fabd83712ff69c7e116667a66ac not found: ID does not exist" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.840261 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2jg2\" (UniqueName: \"kubernetes.io/projected/c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd-kube-api-access-v2jg2\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.941296 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-combined-ca-bundle\") pod \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.941357 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g7lj\" (UniqueName: \"kubernetes.io/projected/4c367332-667b-47f3-b2e6-1a4c759ca7e8-kube-api-access-4g7lj\") pod \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.942294 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-config-data\") pod \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\" (UID: \"4c367332-667b-47f3-b2e6-1a4c759ca7e8\") " Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.947799 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/4c367332-667b-47f3-b2e6-1a4c759ca7e8-kube-api-access-4g7lj" (OuterVolumeSpecName: "kube-api-access-4g7lj") pod "4c367332-667b-47f3-b2e6-1a4c759ca7e8" (UID: "4c367332-667b-47f3-b2e6-1a4c759ca7e8"). InnerVolumeSpecName "kube-api-access-4g7lj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:50 crc kubenswrapper[4935]: I1201 18:57:50.975822 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c367332-667b-47f3-b2e6-1a4c759ca7e8" (UID: "4c367332-667b-47f3-b2e6-1a4c759ca7e8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.005178 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-config-data" (OuterVolumeSpecName: "config-data") pod "4c367332-667b-47f3-b2e6-1a4c759ca7e8" (UID: "4c367332-667b-47f3-b2e6-1a4c759ca7e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.045291 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.045324 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c367332-667b-47f3-b2e6-1a4c759ca7e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.045335 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g7lj\" (UniqueName: \"kubernetes.io/projected/4c367332-667b-47f3-b2e6-1a4c759ca7e8-kube-api-access-4g7lj\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.076077 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.089323 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.101273 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 18:57:51 crc kubenswrapper[4935]: E1201 18:57:51.101868 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c367332-667b-47f3-b2e6-1a4c759ca7e8" containerName="mysqld-exporter" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.101888 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c367332-667b-47f3-b2e6-1a4c759ca7e8" containerName="mysqld-exporter" Dec 01 18:57:51 crc kubenswrapper[4935]: E1201 18:57:51.101915 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd" containerName="kube-state-metrics" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.101922 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd" containerName="kube-state-metrics" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.102199 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c367332-667b-47f3-b2e6-1a4c759ca7e8" containerName="mysqld-exporter" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.102224 4935 
memory_manager.go:354] "RemoveStaleState removing state" podUID="c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd" containerName="kube-state-metrics" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.103235 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.110161 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.112331 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.112595 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.248341 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s946d\" (UniqueName: \"kubernetes.io/projected/4b4e129a-da45-447c-af5d-6370bfee1066-kube-api-access-s946d\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.248417 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b4e129a-da45-447c-af5d-6370bfee1066-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.248441 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b4e129a-da45-447c-af5d-6370bfee1066-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.248582 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4b4e129a-da45-447c-af5d-6370bfee1066-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.350815 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4b4e129a-da45-447c-af5d-6370bfee1066-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.350933 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s946d\" (UniqueName: \"kubernetes.io/projected/4b4e129a-da45-447c-af5d-6370bfee1066-kube-api-access-s946d\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.351004 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b4e129a-da45-447c-af5d-6370bfee1066-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" 
(UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.351036 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b4e129a-da45-447c-af5d-6370bfee1066-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.354942 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b4e129a-da45-447c-af5d-6370bfee1066-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.355925 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b4e129a-da45-447c-af5d-6370bfee1066-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.356371 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4b4e129a-da45-447c-af5d-6370bfee1066-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.373352 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s946d\" (UniqueName: \"kubernetes.io/projected/4b4e129a-da45-447c-af5d-6370bfee1066-kube-api-access-s946d\") pod \"kube-state-metrics-0\" (UID: \"4b4e129a-da45-447c-af5d-6370bfee1066\") " pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.430820 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.758274 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"4c367332-667b-47f3-b2e6-1a4c759ca7e8","Type":"ContainerDied","Data":"a4784305bddedd5bd83fec2095e87e546ad663f0f7888de57212d1061b045431"} Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.758557 4935 scope.go:117] "RemoveContainer" containerID="10dc53b9705f3edc7df6dc900b5169b4c47cca40d7d7aa441b4e6ed62a5a2140" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.758316 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mysqld-exporter-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.802667 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-0"] Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.843354 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-0"] Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.873279 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mysqld-exporter-0"] Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.876112 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.878184 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"mysqld-exporter-config-data" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.878340 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-mysqld-exporter-svc" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.889231 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-0"] Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.943912 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.952412 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.971557 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-config-data\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.971737 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bbwx\" (UniqueName: \"kubernetes.io/projected/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-kube-api-access-9bbwx\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.971780 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:51 crc kubenswrapper[4935]: I1201 18:57:51.971871 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mysqld-exporter-tls-certs\" (UniqueName: \"kubernetes.io/secret/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-mysqld-exporter-tls-certs\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.073641 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bbwx\" (UniqueName: \"kubernetes.io/projected/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-kube-api-access-9bbwx\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.073697 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.073774 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mysqld-exporter-tls-certs\" (UniqueName: \"kubernetes.io/secret/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-mysqld-exporter-tls-certs\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " 
pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.073874 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-config-data\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.080557 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mysqld-exporter-tls-certs\" (UniqueName: \"kubernetes.io/secret/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-mysqld-exporter-tls-certs\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.080592 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-combined-ca-bundle\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.081354 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-config-data\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.092884 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bbwx\" (UniqueName: \"kubernetes.io/projected/6732fd86-efb5-4fec-b8d8-36ed6dfb12eb-kube-api-access-9bbwx\") pod \"mysqld-exporter-0\" (UID: \"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb\") " pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.129667 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.133384 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.139925 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.213126 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mysqld-exporter-0" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.492892 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.493932 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="ceilometer-central-agent" containerID="cri-o://efe3173a5574d2a83c6a8fa1d574b2b291389255d75c6bcc74237ab50a019a67" gracePeriod=30 Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.494049 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="proxy-httpd" containerID="cri-o://41edcf9677c229a49d6fb8ea1319b7a64c47f6a81ce11a865446dd28ae2f849d" gracePeriod=30 Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.494083 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="sg-core" containerID="cri-o://a138dadec0e8590d728166b9f6e4dd19024f575a8c29a4316d4f3f0e93c36cdf" gracePeriod=30 Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.494117 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="ceilometer-notification-agent" containerID="cri-o://e2c86ee668bf34756612494549d233785400d4f2d44120edf6e90badbf6bd211" gracePeriod=30 Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.555468 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c367332-667b-47f3-b2e6-1a4c759ca7e8" path="/var/lib/kubelet/pods/4c367332-667b-47f3-b2e6-1a4c759ca7e8/volumes" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.560015 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd" path="/var/lib/kubelet/pods/c29d0c2e-5d2d-4d69-a64b-1775bb3e3ecd/volumes" Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.700373 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mysqld-exporter-0"] Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.769994 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4b4e129a-da45-447c-af5d-6370bfee1066","Type":"ContainerStarted","Data":"f03a5226f23981cbee4d7a6ed96f0ae2a4866fa375d74fa2f3534e236bad8327"} Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.771659 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb","Type":"ContainerStarted","Data":"ca05b81bde218401ba0370afbe88aa743274a3e1bfeff5b68bcaa131dd830abd"} Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.775523 4935 generic.go:334] "Generic (PLEG): container finished" podID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerID="41edcf9677c229a49d6fb8ea1319b7a64c47f6a81ce11a865446dd28ae2f849d" exitCode=0 Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.775547 4935 generic.go:334] "Generic (PLEG): container finished" podID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerID="a138dadec0e8590d728166b9f6e4dd19024f575a8c29a4316d4f3f0e93c36cdf" exitCode=2 Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.775560 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerDied","Data":"41edcf9677c229a49d6fb8ea1319b7a64c47f6a81ce11a865446dd28ae2f849d"} Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.775591 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerDied","Data":"a138dadec0e8590d728166b9f6e4dd19024f575a8c29a4316d4f3f0e93c36cdf"} Dec 01 18:57:52 crc kubenswrapper[4935]: I1201 18:57:52.781495 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 18:57:53 crc kubenswrapper[4935]: I1201 18:57:53.784785 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mysqld-exporter-0" event={"ID":"6732fd86-efb5-4fec-b8d8-36ed6dfb12eb","Type":"ContainerStarted","Data":"056b5850c5372646c2d7af3f098c74227e3665049024879c0d0eae44c2749cf6"} Dec 01 18:57:53 crc kubenswrapper[4935]: I1201 18:57:53.788448 4935 generic.go:334] "Generic (PLEG): container finished" podID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerID="efe3173a5574d2a83c6a8fa1d574b2b291389255d75c6bcc74237ab50a019a67" exitCode=0 Dec 01 18:57:53 crc kubenswrapper[4935]: I1201 18:57:53.788490 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerDied","Data":"efe3173a5574d2a83c6a8fa1d574b2b291389255d75c6bcc74237ab50a019a67"} Dec 01 18:57:53 crc kubenswrapper[4935]: I1201 18:57:53.790733 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4b4e129a-da45-447c-af5d-6370bfee1066","Type":"ContainerStarted","Data":"00c562d767b71aa6de241d42b0a3db89af71a49efa04f962daa708a03b54f50d"} Dec 01 18:57:53 crc kubenswrapper[4935]: I1201 18:57:53.790761 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 01 18:57:53 crc kubenswrapper[4935]: I1201 18:57:53.803615 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mysqld-exporter-0" podStartSLOduration=2.164875735 podStartE2EDuration="2.803601186s" podCreationTimestamp="2025-12-01 18:57:51 +0000 UTC" firstStartedPulling="2025-12-01 18:57:52.725644799 +0000 UTC m=+1686.747274058" lastFinishedPulling="2025-12-01 18:57:53.36437025 +0000 UTC m=+1687.385999509" observedRunningTime="2025-12-01 18:57:53.797527424 +0000 UTC m=+1687.819156683" watchObservedRunningTime="2025-12-01 18:57:53.803601186 +0000 UTC m=+1687.825230445" Dec 01 18:57:53 crc kubenswrapper[4935]: I1201 18:57:53.824672 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.126200666 podStartE2EDuration="2.824650979s" podCreationTimestamp="2025-12-01 18:57:51 +0000 UTC" firstStartedPulling="2025-12-01 18:57:51.952143182 +0000 UTC m=+1685.973772441" lastFinishedPulling="2025-12-01 18:57:52.650593495 +0000 UTC m=+1686.672222754" observedRunningTime="2025-12-01 18:57:53.811830985 +0000 UTC m=+1687.833460244" watchObservedRunningTime="2025-12-01 18:57:53.824650979 +0000 UTC m=+1687.846280238" Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.013199 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.014834 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 
18:57:54.014896 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.020018 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 18:57:54 crc kubenswrapper[4935]: E1201 18:57:54.434632 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod779e40f0_ef3f_49f9_bbb7_0ba6d24feef4.slice/crio-e2c86ee668bf34756612494549d233785400d4f2d44120edf6e90badbf6bd211.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod779e40f0_ef3f_49f9_bbb7_0ba6d24feef4.slice/crio-conmon-e2c86ee668bf34756612494549d233785400d4f2d44120edf6e90badbf6bd211.scope\": RecentStats: unable to find data in memory cache]" Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.803770 4935 generic.go:334] "Generic (PLEG): container finished" podID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerID="e2c86ee668bf34756612494549d233785400d4f2d44120edf6e90badbf6bd211" exitCode=0 Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.803870 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerDied","Data":"e2c86ee668bf34756612494549d233785400d4f2d44120edf6e90badbf6bd211"} Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.804283 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4","Type":"ContainerDied","Data":"b3f4a7d195efa22fd2222d4dfba11cc9e809cc7c0332c47c5b79f82191ceb033"} Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.804301 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3f4a7d195efa22fd2222d4dfba11cc9e809cc7c0332c47c5b79f82191ceb033" Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.807021 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.823729 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 18:57:54 crc kubenswrapper[4935]: I1201 18:57:54.945250 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.057250 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-combined-ca-bundle\") pod \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.057398 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-sg-core-conf-yaml\") pod \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.057491 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-scripts\") pod \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.057535 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8txbn\" (UniqueName: \"kubernetes.io/projected/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-kube-api-access-8txbn\") pod \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.057620 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-log-httpd\") pod \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.057724 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-run-httpd\") pod \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.057777 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-config-data\") pod \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\" (UID: \"779e40f0-ef3f-49f9-bbb7-0ba6d24feef4\") " Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.058141 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" (UID: "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.058174 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" (UID: "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.058313 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.058327 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.063374 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-scripts" (OuterVolumeSpecName: "scripts") pod "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" (UID: "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.073358 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-kube-api-access-8txbn" (OuterVolumeSpecName: "kube-api-access-8txbn") pod "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" (UID: "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4"). InnerVolumeSpecName "kube-api-access-8txbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.105123 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" (UID: "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.160612 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.160636 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.160647 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8txbn\" (UniqueName: \"kubernetes.io/projected/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-kube-api-access-8txbn\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.164165 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" (UID: "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.202247 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-config-data" (OuterVolumeSpecName: "config-data") pod "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" (UID: "779e40f0-ef3f-49f9-bbb7-0ba6d24feef4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.263042 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.263067 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.507594 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:57:55 crc kubenswrapper[4935]: E1201 18:57:55.507869 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.813615 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.851546 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.861935 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.881035 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:55 crc kubenswrapper[4935]: E1201 18:57:55.881503 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="ceilometer-central-agent" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.881520 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="ceilometer-central-agent" Dec 01 18:57:55 crc kubenswrapper[4935]: E1201 18:57:55.881544 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="sg-core" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.881550 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="sg-core" Dec 01 18:57:55 crc kubenswrapper[4935]: E1201 18:57:55.881571 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="ceilometer-notification-agent" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.881577 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="ceilometer-notification-agent" Dec 01 18:57:55 crc kubenswrapper[4935]: E1201 18:57:55.881598 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="proxy-httpd" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.881604 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="proxy-httpd" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.881826 4935 
memory_manager.go:354] "RemoveStaleState removing state" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="ceilometer-central-agent" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.881843 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="proxy-httpd" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.881855 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="sg-core" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.881870 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" containerName="ceilometer-notification-agent" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.883969 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.888249 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.888559 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.888845 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.906492 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.978790 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.978843 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.978934 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.979184 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-config-data\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.979264 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-run-httpd\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.979463 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-log-httpd\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.979675 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86c2v\" (UniqueName: \"kubernetes.io/projected/08fcffc0-7035-4a36-bf0d-d01609a4c525-kube-api-access-86c2v\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:55 crc kubenswrapper[4935]: I1201 18:57:55.979758 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-scripts\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.082480 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-config-data\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.082542 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-run-httpd\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.082628 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-log-httpd\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.082759 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86c2v\" (UniqueName: \"kubernetes.io/projected/08fcffc0-7035-4a36-bf0d-d01609a4c525-kube-api-access-86c2v\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.082798 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-scripts\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.082898 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.082926 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 
18:57:56.082967 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.083569 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-run-httpd\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.083800 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-log-httpd\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.086685 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-scripts\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.088292 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.088949 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-config-data\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.091485 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.098874 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.106704 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86c2v\" (UniqueName: \"kubernetes.io/projected/08fcffc0-7035-4a36-bf0d-d01609a4c525-kube-api-access-86c2v\") pod \"ceilometer-0\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.218468 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.532791 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="779e40f0-ef3f-49f9-bbb7-0ba6d24feef4" path="/var/lib/kubelet/pods/779e40f0-ef3f-49f9-bbb7-0ba6d24feef4/volumes" Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.767634 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:57:56 crc kubenswrapper[4935]: I1201 18:57:56.826061 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerStarted","Data":"e4709a8399e9346f37cabeb1fc3756324c4bd5b38f657b00f598290535f1d6af"} Dec 01 18:57:58 crc kubenswrapper[4935]: I1201 18:57:58.867734 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerStarted","Data":"eedf04ac90a41f9c339e84c28d8e3e97b9f3b29cda259c30c7d8016216de3688"} Dec 01 18:57:59 crc kubenswrapper[4935]: I1201 18:57:59.880386 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerStarted","Data":"61c4c41a9e6abf27ccb29dcffd42f2dc76f7b89ae400f023166f20f442b96938"} Dec 01 18:57:59 crc kubenswrapper[4935]: I1201 18:57:59.880976 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerStarted","Data":"47efb44cffb3f1a23c4931d72ee1ef66f3e544e30d197a15b2d3d95804e5e4b5"} Dec 01 18:58:01 crc kubenswrapper[4935]: I1201 18:58:01.448988 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 18:58:01 crc kubenswrapper[4935]: I1201 18:58:01.927822 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerStarted","Data":"82d0a9aa06ea86f86e1e03668d3f795676388a769cb2483bce9ae02847131dde"} Dec 01 18:58:01 crc kubenswrapper[4935]: I1201 18:58:01.928310 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:58:01 crc kubenswrapper[4935]: I1201 18:58:01.951922 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.414097281 podStartE2EDuration="6.951901258s" podCreationTimestamp="2025-12-01 18:57:55 +0000 UTC" firstStartedPulling="2025-12-01 18:57:56.760809652 +0000 UTC m=+1690.782438911" lastFinishedPulling="2025-12-01 18:58:01.298613629 +0000 UTC m=+1695.320242888" observedRunningTime="2025-12-01 18:58:01.946986693 +0000 UTC m=+1695.968615972" watchObservedRunningTime="2025-12-01 18:58:01.951901258 +0000 UTC m=+1695.973530517" Dec 01 18:58:08 crc kubenswrapper[4935]: I1201 18:58:08.509420 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:58:08 crc kubenswrapper[4935]: E1201 18:58:08.511064 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" 
podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:58:22 crc kubenswrapper[4935]: I1201 18:58:22.509033 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:58:22 crc kubenswrapper[4935]: E1201 18:58:22.510198 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:58:26 crc kubenswrapper[4935]: I1201 18:58:26.227820 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 18:58:36 crc kubenswrapper[4935]: I1201 18:58:36.520060 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:58:36 crc kubenswrapper[4935]: E1201 18:58:36.522612 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.410005 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-747dz"] Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.423793 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-747dz"] Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.524137 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7935698e-d40a-4c10-bf91-0a5d8855a09e" path="/var/lib/kubelet/pods/7935698e-d40a-4c10-bf91-0a5d8855a09e/volumes" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.525185 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-6rt65"] Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.527406 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.528109 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-6rt65"] Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.578191 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jljc9\" (UniqueName: \"kubernetes.io/projected/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-kube-api-access-jljc9\") pod \"heat-db-sync-6rt65\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.578495 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-combined-ca-bundle\") pod \"heat-db-sync-6rt65\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.578693 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-config-data\") pod \"heat-db-sync-6rt65\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.681476 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jljc9\" (UniqueName: \"kubernetes.io/projected/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-kube-api-access-jljc9\") pod \"heat-db-sync-6rt65\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.681609 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-combined-ca-bundle\") pod \"heat-db-sync-6rt65\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.681701 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-config-data\") pod \"heat-db-sync-6rt65\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.692319 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-config-data\") pod \"heat-db-sync-6rt65\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.695049 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-combined-ca-bundle\") pod \"heat-db-sync-6rt65\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.713646 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jljc9\" (UniqueName: \"kubernetes.io/projected/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-kube-api-access-jljc9\") pod \"heat-db-sync-6rt65\" (UID: 
\"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:38 crc kubenswrapper[4935]: I1201 18:58:38.872739 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-6rt65" Dec 01 18:58:39 crc kubenswrapper[4935]: I1201 18:58:39.405224 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-6rt65"] Dec 01 18:58:39 crc kubenswrapper[4935]: W1201 18:58:39.405883 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25a03a1c_18c2_4f1d_96bf_cbb5bdae3749.slice/crio-2cc760d49153b71e05c2665a3d3c9154be9012e40d690ad0d78e7e7a43c93ac0 WatchSource:0}: Error finding container 2cc760d49153b71e05c2665a3d3c9154be9012e40d690ad0d78e7e7a43c93ac0: Status 404 returned error can't find the container with id 2cc760d49153b71e05c2665a3d3c9154be9012e40d690ad0d78e7e7a43c93ac0 Dec 01 18:58:40 crc kubenswrapper[4935]: I1201 18:58:40.480976 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-6rt65" event={"ID":"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749","Type":"ContainerStarted","Data":"2cc760d49153b71e05c2665a3d3c9154be9012e40d690ad0d78e7e7a43c93ac0"} Dec 01 18:58:40 crc kubenswrapper[4935]: I1201 18:58:40.503696 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:58:40 crc kubenswrapper[4935]: I1201 18:58:40.503963 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="ceilometer-central-agent" containerID="cri-o://eedf04ac90a41f9c339e84c28d8e3e97b9f3b29cda259c30c7d8016216de3688" gracePeriod=30 Dec 01 18:58:40 crc kubenswrapper[4935]: I1201 18:58:40.504039 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="proxy-httpd" containerID="cri-o://82d0a9aa06ea86f86e1e03668d3f795676388a769cb2483bce9ae02847131dde" gracePeriod=30 Dec 01 18:58:40 crc kubenswrapper[4935]: I1201 18:58:40.504074 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="sg-core" containerID="cri-o://61c4c41a9e6abf27ccb29dcffd42f2dc76f7b89ae400f023166f20f442b96938" gracePeriod=30 Dec 01 18:58:40 crc kubenswrapper[4935]: I1201 18:58:40.504109 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="ceilometer-notification-agent" containerID="cri-o://47efb44cffb3f1a23c4931d72ee1ef66f3e544e30d197a15b2d3d95804e5e4b5" gracePeriod=30 Dec 01 18:58:40 crc kubenswrapper[4935]: I1201 18:58:40.999527 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 18:58:41 crc kubenswrapper[4935]: I1201 18:58:41.518891 4935 generic.go:334] "Generic (PLEG): container finished" podID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerID="82d0a9aa06ea86f86e1e03668d3f795676388a769cb2483bce9ae02847131dde" exitCode=0 Dec 01 18:58:41 crc kubenswrapper[4935]: I1201 18:58:41.518922 4935 generic.go:334] "Generic (PLEG): container finished" podID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerID="61c4c41a9e6abf27ccb29dcffd42f2dc76f7b89ae400f023166f20f442b96938" exitCode=2 Dec 01 18:58:41 crc kubenswrapper[4935]: I1201 18:58:41.518929 4935 generic.go:334] 
"Generic (PLEG): container finished" podID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerID="47efb44cffb3f1a23c4931d72ee1ef66f3e544e30d197a15b2d3d95804e5e4b5" exitCode=0 Dec 01 18:58:41 crc kubenswrapper[4935]: I1201 18:58:41.518936 4935 generic.go:334] "Generic (PLEG): container finished" podID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerID="eedf04ac90a41f9c339e84c28d8e3e97b9f3b29cda259c30c7d8016216de3688" exitCode=0 Dec 01 18:58:41 crc kubenswrapper[4935]: I1201 18:58:41.518955 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerDied","Data":"82d0a9aa06ea86f86e1e03668d3f795676388a769cb2483bce9ae02847131dde"} Dec 01 18:58:41 crc kubenswrapper[4935]: I1201 18:58:41.518979 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerDied","Data":"61c4c41a9e6abf27ccb29dcffd42f2dc76f7b89ae400f023166f20f442b96938"} Dec 01 18:58:41 crc kubenswrapper[4935]: I1201 18:58:41.518989 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerDied","Data":"47efb44cffb3f1a23c4931d72ee1ef66f3e544e30d197a15b2d3d95804e5e4b5"} Dec 01 18:58:41 crc kubenswrapper[4935]: I1201 18:58:41.519001 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerDied","Data":"eedf04ac90a41f9c339e84c28d8e3e97b9f3b29cda259c30c7d8016216de3688"} Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.122321 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.184420 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-ceilometer-tls-certs\") pod \"08fcffc0-7035-4a36-bf0d-d01609a4c525\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.184478 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86c2v\" (UniqueName: \"kubernetes.io/projected/08fcffc0-7035-4a36-bf0d-d01609a4c525-kube-api-access-86c2v\") pod \"08fcffc0-7035-4a36-bf0d-d01609a4c525\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.184494 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-combined-ca-bundle\") pod \"08fcffc0-7035-4a36-bf0d-d01609a4c525\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.184565 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-run-httpd\") pod \"08fcffc0-7035-4a36-bf0d-d01609a4c525\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.184627 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-log-httpd\") pod \"08fcffc0-7035-4a36-bf0d-d01609a4c525\" (UID: 
\"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.184688 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-config-data\") pod \"08fcffc0-7035-4a36-bf0d-d01609a4c525\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.184785 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-scripts\") pod \"08fcffc0-7035-4a36-bf0d-d01609a4c525\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.184819 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-sg-core-conf-yaml\") pod \"08fcffc0-7035-4a36-bf0d-d01609a4c525\" (UID: \"08fcffc0-7035-4a36-bf0d-d01609a4c525\") " Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.185728 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "08fcffc0-7035-4a36-bf0d-d01609a4c525" (UID: "08fcffc0-7035-4a36-bf0d-d01609a4c525"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.186335 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "08fcffc0-7035-4a36-bf0d-d01609a4c525" (UID: "08fcffc0-7035-4a36-bf0d-d01609a4c525"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.218406 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08fcffc0-7035-4a36-bf0d-d01609a4c525-kube-api-access-86c2v" (OuterVolumeSpecName: "kube-api-access-86c2v") pod "08fcffc0-7035-4a36-bf0d-d01609a4c525" (UID: "08fcffc0-7035-4a36-bf0d-d01609a4c525"). InnerVolumeSpecName "kube-api-access-86c2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.270701 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-scripts" (OuterVolumeSpecName: "scripts") pod "08fcffc0-7035-4a36-bf0d-d01609a4c525" (UID: "08fcffc0-7035-4a36-bf0d-d01609a4c525"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.289569 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86c2v\" (UniqueName: \"kubernetes.io/projected/08fcffc0-7035-4a36-bf0d-d01609a4c525-kube-api-access-86c2v\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.289606 4935 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.289614 4935 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fcffc0-7035-4a36-bf0d-d01609a4c525-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.289622 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.328281 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "08fcffc0-7035-4a36-bf0d-d01609a4c525" (UID: "08fcffc0-7035-4a36-bf0d-d01609a4c525"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.331647 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.369668 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "08fcffc0-7035-4a36-bf0d-d01609a4c525" (UID: "08fcffc0-7035-4a36-bf0d-d01609a4c525"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.391657 4935 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.391686 4935 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.450549 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08fcffc0-7035-4a36-bf0d-d01609a4c525" (UID: "08fcffc0-7035-4a36-bf0d-d01609a4c525"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.469561 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-config-data" (OuterVolumeSpecName: "config-data") pod "08fcffc0-7035-4a36-bf0d-d01609a4c525" (UID: "08fcffc0-7035-4a36-bf0d-d01609a4c525"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.493485 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.493513 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fcffc0-7035-4a36-bf0d-d01609a4c525-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.539158 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fcffc0-7035-4a36-bf0d-d01609a4c525","Type":"ContainerDied","Data":"e4709a8399e9346f37cabeb1fc3756324c4bd5b38f657b00f598290535f1d6af"} Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.539205 4935 scope.go:117] "RemoveContainer" containerID="82d0a9aa06ea86f86e1e03668d3f795676388a769cb2483bce9ae02847131dde" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.539350 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.604378 4935 scope.go:117] "RemoveContainer" containerID="61c4c41a9e6abf27ccb29dcffd42f2dc76f7b89ae400f023166f20f442b96938" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.604794 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.618235 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.645275 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:58:42 crc kubenswrapper[4935]: E1201 18:58:42.645805 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="ceilometer-notification-agent" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.645824 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="ceilometer-notification-agent" Dec 01 18:58:42 crc kubenswrapper[4935]: E1201 18:58:42.645863 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="ceilometer-central-agent" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.645872 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="ceilometer-central-agent" Dec 01 18:58:42 crc kubenswrapper[4935]: E1201 18:58:42.645885 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="sg-core" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.645891 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="sg-core" Dec 01 18:58:42 crc kubenswrapper[4935]: E1201 18:58:42.645906 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="proxy-httpd" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.645912 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="proxy-httpd" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.646136 4935 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="ceilometer-central-agent" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.646173 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="ceilometer-notification-agent" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.646186 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="proxy-httpd" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.646197 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" containerName="sg-core" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.648215 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.651852 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.652087 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.655817 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.664395 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.689969 4935 scope.go:117] "RemoveContainer" containerID="47efb44cffb3f1a23c4931d72ee1ef66f3e544e30d197a15b2d3d95804e5e4b5" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.703569 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75lqf\" (UniqueName: \"kubernetes.io/projected/0f43eed1-cea2-4621-8fec-f55587776177-kube-api-access-75lqf\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.703641 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.703696 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-scripts\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.703757 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.703821 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f43eed1-cea2-4621-8fec-f55587776177-run-httpd\") pod \"ceilometer-0\" (UID: 
\"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.703908 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f43eed1-cea2-4621-8fec-f55587776177-log-httpd\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.703952 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-config-data\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.703975 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.735816 4935 scope.go:117] "RemoveContainer" containerID="eedf04ac90a41f9c339e84c28d8e3e97b9f3b29cda259c30c7d8016216de3688" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.805460 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.805553 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f43eed1-cea2-4621-8fec-f55587776177-run-httpd\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.805639 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f43eed1-cea2-4621-8fec-f55587776177-log-httpd\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.805673 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-config-data\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.805698 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.805727 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75lqf\" (UniqueName: \"kubernetes.io/projected/0f43eed1-cea2-4621-8fec-f55587776177-kube-api-access-75lqf\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 
18:58:42.805757 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.805782 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-scripts\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.806334 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f43eed1-cea2-4621-8fec-f55587776177-run-httpd\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.807270 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0f43eed1-cea2-4621-8fec-f55587776177-log-httpd\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.809730 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.809820 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-scripts\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.810130 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.810209 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.810245 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f43eed1-cea2-4621-8fec-f55587776177-config-data\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.821751 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75lqf\" (UniqueName: \"kubernetes.io/projected/0f43eed1-cea2-4621-8fec-f55587776177-kube-api-access-75lqf\") pod \"ceilometer-0\" (UID: \"0f43eed1-cea2-4621-8fec-f55587776177\") " pod="openstack/ceilometer-0" Dec 01 18:58:42 crc kubenswrapper[4935]: I1201 18:58:42.978401 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 18:58:43 crc kubenswrapper[4935]: I1201 18:58:43.589615 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 18:58:43 crc kubenswrapper[4935]: W1201 18:58:43.619756 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f43eed1_cea2_4621_8fec_f55587776177.slice/crio-be4675393fbdae0b7e3a1bec32444282225fe032963d8545d9fc8c8445b65bdb WatchSource:0}: Error finding container be4675393fbdae0b7e3a1bec32444282225fe032963d8545d9fc8c8445b65bdb: Status 404 returned error can't find the container with id be4675393fbdae0b7e3a1bec32444282225fe032963d8545d9fc8c8445b65bdb Dec 01 18:58:44 crc kubenswrapper[4935]: I1201 18:58:44.527348 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08fcffc0-7035-4a36-bf0d-d01609a4c525" path="/var/lib/kubelet/pods/08fcffc0-7035-4a36-bf0d-d01609a4c525/volumes" Dec 01 18:58:44 crc kubenswrapper[4935]: I1201 18:58:44.597576 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0f43eed1-cea2-4621-8fec-f55587776177","Type":"ContainerStarted","Data":"be4675393fbdae0b7e3a1bec32444282225fe032963d8545d9fc8c8445b65bdb"} Dec 01 18:58:45 crc kubenswrapper[4935]: I1201 18:58:45.579445 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="8bd64079-678d-43de-aeb6-6818338d5997" containerName="rabbitmq" containerID="cri-o://0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d" gracePeriod=604796 Dec 01 18:58:46 crc kubenswrapper[4935]: I1201 18:58:46.410862 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="8bd64079-678d-43de-aeb6-6818338d5997" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.127:5671: connect: connection refused" Dec 01 18:58:47 crc kubenswrapper[4935]: I1201 18:58:47.293496 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="fce93449-11d7-490f-9456-8f8667b9cb6d" containerName="rabbitmq" containerID="cri-o://a6b083bbcbc548037c8291d97d5dd01282c2a97b1d1b9a9866a7df79c9eed3e9" gracePeriod=604796 Dec 01 18:58:49 crc kubenswrapper[4935]: I1201 18:58:49.508138 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:58:49 crc kubenswrapper[4935]: E1201 18:58:49.508851 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.644937 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.650900 4935 scope.go:117] "RemoveContainer" containerID="0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.751226 4935 generic.go:334] "Generic (PLEG): container finished" podID="8bd64079-678d-43de-aeb6-6818338d5997" containerID="0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d" exitCode=0 Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.751407 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.751450 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8bd64079-678d-43de-aeb6-6818338d5997","Type":"ContainerDied","Data":"0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d"} Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.751652 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8bd64079-678d-43de-aeb6-6818338d5997","Type":"ContainerDied","Data":"d8b01d11c0115a84afa8993de3e409974a4da2ca3769434db6a71c312eeeb9f0"} Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.751679 4935 scope.go:117] "RemoveContainer" containerID="0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791081 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpmjh\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-kube-api-access-gpmjh\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791130 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8bd64079-678d-43de-aeb6-6818338d5997-erlang-cookie-secret\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791353 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-plugins-conf\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791407 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-server-conf\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791497 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-erlang-cookie\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791544 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8bd64079-678d-43de-aeb6-6818338d5997-pod-info\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: 
\"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791578 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-plugins\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791644 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-config-data\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791706 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-tls\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791782 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.791886 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-confd\") pod \"8bd64079-678d-43de-aeb6-6818338d5997\" (UID: \"8bd64079-678d-43de-aeb6-6818338d5997\") " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.794511 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.799302 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.799856 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-kube-api-access-gpmjh" (OuterVolumeSpecName: "kube-api-access-gpmjh") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "kube-api-access-gpmjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.802430 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.805611 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bd64079-678d-43de-aeb6-6818338d5997-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.807704 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.807780 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/8bd64079-678d-43de-aeb6-6818338d5997-pod-info" (OuterVolumeSpecName: "pod-info") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.837978 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-config-data" (OuterVolumeSpecName: "config-data") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.871826 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.895553 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpmjh\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-kube-api-access-gpmjh\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.895585 4935 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8bd64079-678d-43de-aeb6-6818338d5997-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.895598 4935 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.895607 4935 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.895615 4935 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8bd64079-678d-43de-aeb6-6818338d5997-pod-info\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.895633 4935 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.895640 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.895648 4935 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.895675 4935 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.906099 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-server-conf" (OuterVolumeSpecName: "server-conf") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.951745 4935 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.966491 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "8bd64079-678d-43de-aeb6-6818338d5997" (UID: "8bd64079-678d-43de-aeb6-6818338d5997"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.998817 4935 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8bd64079-678d-43de-aeb6-6818338d5997-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.998855 4935 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8bd64079-678d-43de-aeb6-6818338d5997-server-conf\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:52 crc kubenswrapper[4935]: I1201 18:58:52.998867 4935 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.153558 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.175216 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.188731 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 18:58:53 crc kubenswrapper[4935]: E1201 18:58:53.189466 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bd64079-678d-43de-aeb6-6818338d5997" containerName="rabbitmq" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.189534 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bd64079-678d-43de-aeb6-6818338d5997" containerName="rabbitmq" Dec 01 18:58:53 crc kubenswrapper[4935]: E1201 18:58:53.189609 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bd64079-678d-43de-aeb6-6818338d5997" containerName="setup-container" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.189667 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bd64079-678d-43de-aeb6-6818338d5997" containerName="setup-container" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.189962 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bd64079-678d-43de-aeb6-6818338d5997" containerName="rabbitmq" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.191355 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.197618 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-lkt6v" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.197988 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.198439 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.198656 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.198771 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.198861 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.199112 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.229479 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.304322 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.304804 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.304907 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.304932 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.304991 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.305017 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zgl5\" (UniqueName: 
\"kubernetes.io/projected/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-kube-api-access-7zgl5\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.305123 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-config-data\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.305302 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.305412 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.305431 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.305485 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407525 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407624 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407656 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407698 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " 
pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407723 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zgl5\" (UniqueName: \"kubernetes.io/projected/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-kube-api-access-7zgl5\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407755 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-config-data\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407815 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407861 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407879 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407907 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.407957 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.408366 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.408468 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.408551 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.409404 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.409537 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-config-data\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.409578 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.412508 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.412536 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.412722 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.422663 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.463249 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zgl5\" (UniqueName: \"kubernetes.io/projected/428c8cdb-5fa3-4a5e-b249-1bb3713220a4-kube-api-access-7zgl5\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.498110 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"428c8cdb-5fa3-4a5e-b249-1bb3713220a4\") " pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.528443 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.771969 4935 generic.go:334] "Generic (PLEG): container finished" podID="fce93449-11d7-490f-9456-8f8667b9cb6d" containerID="a6b083bbcbc548037c8291d97d5dd01282c2a97b1d1b9a9866a7df79c9eed3e9" exitCode=0 Dec 01 18:58:53 crc kubenswrapper[4935]: I1201 18:58:53.772026 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"fce93449-11d7-490f-9456-8f8667b9cb6d","Type":"ContainerDied","Data":"a6b083bbcbc548037c8291d97d5dd01282c2a97b1d1b9a9866a7df79c9eed3e9"} Dec 01 18:58:54 crc kubenswrapper[4935]: I1201 18:58:54.521992 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bd64079-678d-43de-aeb6-6818338d5997" path="/var/lib/kubelet/pods/8bd64079-678d-43de-aeb6-6818338d5997/volumes" Dec 01 18:58:56 crc kubenswrapper[4935]: I1201 18:58:56.788929 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="fce93449-11d7-490f-9456-8f8667b9cb6d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.128:5671: connect: connection refused" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.485633 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b75489c6f-hsqm4"] Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.493227 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.496479 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.504967 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b75489c6f-hsqm4"] Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.614723 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-sb\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.614779 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-config\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.614825 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-nb\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.614848 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-openstack-edpm-ipam\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.615748 
4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dbd5\" (UniqueName: \"kubernetes.io/projected/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-kube-api-access-2dbd5\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.615850 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-svc\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.615911 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-swift-storage-0\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.719662 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-sb\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.719785 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-config\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.720460 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-sb\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.721031 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-config\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.721120 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-nb\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.725441 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-nb\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.725600 4935 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-openstack-edpm-ipam\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.726236 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-openstack-edpm-ipam\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.726547 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dbd5\" (UniqueName: \"kubernetes.io/projected/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-kube-api-access-2dbd5\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.726914 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-svc\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.727511 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-svc\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.727634 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-swift-storage-0\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.728208 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-swift-storage-0\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.744993 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dbd5\" (UniqueName: \"kubernetes.io/projected/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-kube-api-access-2dbd5\") pod \"dnsmasq-dns-5b75489c6f-hsqm4\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:58:57 crc kubenswrapper[4935]: I1201 18:58:57.822988 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:59:03 crc kubenswrapper[4935]: I1201 18:59:03.297449 4935 scope.go:117] "RemoveContainer" containerID="0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a" Dec 01 18:59:03 crc kubenswrapper[4935]: E1201 18:59:03.318247 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-heat-engine:current-tested" Dec 01 18:59:03 crc kubenswrapper[4935]: E1201 18:59:03.318310 4935 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-heat-engine:current-tested" Dec 01 18:59:03 crc kubenswrapper[4935]: E1201 18:59:03.318449 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:heat-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-heat-engine:current-tested,Command:[/bin/bash],Args:[-c /usr/bin/heat-manage --config-dir /etc/heat/heat.conf.d db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/heat/heat.conf.d/00-default.conf,SubPath:00-default.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/heat/heat.conf.d/01-custom.conf,SubPath:01-custom.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jljc9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42418,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42418,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-db-sync-6rt65_openstack(25a03a1c-18c2-4f1d-96bf-cbb5bdae3749): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:59:03 crc kubenswrapper[4935]: E1201 18:59:03.319948 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/heat-db-sync-6rt65" podUID="25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" Dec 01 18:59:03 crc kubenswrapper[4935]: E1201 18:59:03.924607 4935 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-heat-engine:current-tested\\\"\"" pod="openstack/heat-db-sync-6rt65" podUID="25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" Dec 01 18:59:04 crc kubenswrapper[4935]: I1201 18:59:04.509113 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:59:04 crc kubenswrapper[4935]: E1201 18:59:04.510095 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:59:04 crc kubenswrapper[4935]: I1201 18:59:04.967108 4935 scope.go:117] "RemoveContainer" containerID="2a9c1fb9278fc32fc35a34867d0f98c704f913d00e0d0be7c9a9c4a305be56e4" Dec 01 18:59:04 crc kubenswrapper[4935]: E1201 18:59:04.982402 4935 log.go:32] "RemoveContainer from runtime service failed" err="rpc error: code = Unknown desc = failed to delete container k8s_setup-container_rabbitmq-server-0_openstack_8bd64079-678d-43de-aeb6-6818338d5997_0 in pod sandbox d8b01d11c0115a84afa8993de3e409974a4da2ca3769434db6a71c312eeeb9f0 from index: no such id: '0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a'" containerID="0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a" Dec 01 18:59:04 crc kubenswrapper[4935]: I1201 18:59:04.982449 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a"} err="rpc error: code = Unknown desc = failed to delete container k8s_setup-container_rabbitmq-server-0_openstack_8bd64079-678d-43de-aeb6-6818338d5997_0 in pod sandbox d8b01d11c0115a84afa8993de3e409974a4da2ca3769434db6a71c312eeeb9f0 from index: no such id: '0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a'" Dec 01 18:59:04 crc kubenswrapper[4935]: I1201 18:59:04.982474 4935 scope.go:117] "RemoveContainer" containerID="0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d" Dec 01 18:59:04 crc kubenswrapper[4935]: E1201 18:59:04.982865 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d\": container with ID starting with 0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d not found: ID does not exist" containerID="0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d" Dec 01 18:59:04 crc kubenswrapper[4935]: I1201 18:59:04.982893 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d"} err="failed to get container status \"0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d\": rpc error: code = NotFound desc = could not find container \"0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d\": container with ID starting with 0be5d294c5202f90c28380b88ad9abf322598572225f809dbb7af28d0693b16d not found: ID does not exist" Dec 01 18:59:04 crc kubenswrapper[4935]: I1201 
18:59:04.982907 4935 scope.go:117] "RemoveContainer" containerID="0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a" Dec 01 18:59:04 crc kubenswrapper[4935]: E1201 18:59:04.983085 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a\": container with ID starting with 0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a not found: ID does not exist" containerID="0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a" Dec 01 18:59:04 crc kubenswrapper[4935]: I1201 18:59:04.983106 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a"} err="failed to get container status \"0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a\": rpc error: code = NotFound desc = could not find container \"0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a\": container with ID starting with 0f663325c9e5fc9b4d6afe2d30c9e889d6303453598c4e3e770365125f47f05a not found: ID does not exist" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.119012 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.245917 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7gpf\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-kube-api-access-m7gpf\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.246015 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fce93449-11d7-490f-9456-8f8667b9cb6d-erlang-cookie-secret\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.246137 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-config-data\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.246179 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-confd\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.246213 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fce93449-11d7-490f-9456-8f8667b9cb6d-pod-info\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.246302 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-erlang-cookie\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc 
kubenswrapper[4935]: I1201 18:59:05.246383 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-plugins-conf\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.246411 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-server-conf\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.246445 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.246544 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-tls\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.246637 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-plugins\") pod \"fce93449-11d7-490f-9456-8f8667b9cb6d\" (UID: \"fce93449-11d7-490f-9456-8f8667b9cb6d\") " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.247428 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.248279 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.253358 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.254422 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-kube-api-access-m7gpf" (OuterVolumeSpecName: "kube-api-access-m7gpf") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "kube-api-access-m7gpf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.255259 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fce93449-11d7-490f-9456-8f8667b9cb6d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.257050 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/fce93449-11d7-490f-9456-8f8667b9cb6d-pod-info" (OuterVolumeSpecName: "pod-info") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.296315 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.296437 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.327110 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-server-conf" (OuterVolumeSpecName: "server-conf") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.349799 4935 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.349834 4935 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.349846 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7gpf\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-kube-api-access-m7gpf\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.349860 4935 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fce93449-11d7-490f-9456-8f8667b9cb6d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.349870 4935 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fce93449-11d7-490f-9456-8f8667b9cb6d-pod-info\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.349881 4935 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.349890 4935 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.349900 4935 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-server-conf\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.349924 4935 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.352230 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-config-data" (OuterVolumeSpecName: "config-data") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.391671 4935 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.425364 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "fce93449-11d7-490f-9456-8f8667b9cb6d" (UID: "fce93449-11d7-490f-9456-8f8667b9cb6d"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.452194 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fce93449-11d7-490f-9456-8f8667b9cb6d-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.452231 4935 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fce93449-11d7-490f-9456-8f8667b9cb6d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.452245 4935 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.947941 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"fce93449-11d7-490f-9456-8f8667b9cb6d","Type":"ContainerDied","Data":"95b0feb6cbea065497eb5d4b14056c5ac69ebf59a54d666f3349991bafc78904"} Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.948020 4935 scope.go:117] "RemoveContainer" containerID="a6b083bbcbc548037c8291d97d5dd01282c2a97b1d1b9a9866a7df79c9eed3e9" Dec 01 18:59:05 crc kubenswrapper[4935]: I1201 18:59:05.948053 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.011396 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 18:59:06 crc kubenswrapper[4935]: E1201 18:59:06.022586 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested" Dec 01 18:59:06 crc kubenswrapper[4935]: E1201 18:59:06.022644 4935 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested" Dec 01 18:59:06 crc kubenswrapper[4935]: E1201 18:59:06.022777 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5d7h57fh58ch99h675h5b5hf8hb4h9h67ch4h559h4h89h85h5dh656h5b9h5f8h59fh78h596h549h5d5h644h9fh58fh66h64fhbbhc6h79q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-75lqf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(0f43eed1-cea2-4621-8fec-f55587776177): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.035050 4935 scope.go:117] "RemoveContainer" containerID="4436d0a1aa0b94abbc6edba1f66766705f930f3895beeda29b89c6f73d2deabb" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.046772 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.071300 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 18:59:06 crc kubenswrapper[4935]: E1201 18:59:06.071983 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce93449-11d7-490f-9456-8f8667b9cb6d" containerName="setup-container" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.071999 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce93449-11d7-490f-9456-8f8667b9cb6d" containerName="setup-container" Dec 01 18:59:06 crc kubenswrapper[4935]: E1201 18:59:06.072021 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce93449-11d7-490f-9456-8f8667b9cb6d" containerName="rabbitmq" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.072029 4935 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="fce93449-11d7-490f-9456-8f8667b9cb6d" containerName="rabbitmq" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.072368 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="fce93449-11d7-490f-9456-8f8667b9cb6d" containerName="rabbitmq" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.074081 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.079616 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.079752 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-brpbv" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.079920 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.080047 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.080202 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.080308 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.080427 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.127597 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.140315 4935 scope.go:117] "RemoveContainer" containerID="4436d0a1aa0b94abbc6edba1f66766705f930f3895beeda29b89c6f73d2deabb" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.220125 4935 scope.go:117] "RemoveContainer" containerID="cdd47d7d3507285a9802d9145d260cc1b51ad07eb7cc6bc00073fda17ab2c6e2" Dec 01 18:59:06 crc kubenswrapper[4935]: E1201 18:59:06.224095 4935 log.go:32] "RemoveContainer from runtime service failed" err="rpc error: code = Unknown desc = failed to delete container k8s_setup-container_rabbitmq-cell1-server-0_openstack_fce93449-11d7-490f-9456-8f8667b9cb6d_0 in pod sandbox 95b0feb6cbea065497eb5d4b14056c5ac69ebf59a54d666f3349991bafc78904 from index: no such id: '4436d0a1aa0b94abbc6edba1f66766705f930f3895beeda29b89c6f73d2deabb'" containerID="4436d0a1aa0b94abbc6edba1f66766705f930f3895beeda29b89c6f73d2deabb" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.224136 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4436d0a1aa0b94abbc6edba1f66766705f930f3895beeda29b89c6f73d2deabb"} err="rpc error: code = Unknown desc = failed to delete container k8s_setup-container_rabbitmq-cell1-server-0_openstack_fce93449-11d7-490f-9456-8f8667b9cb6d_0 in pod sandbox 95b0feb6cbea065497eb5d4b14056c5ac69ebf59a54d666f3349991bafc78904 from index: no such id: '4436d0a1aa0b94abbc6edba1f66766705f930f3895beeda29b89c6f73d2deabb'" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282642 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282695 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282727 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282751 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282776 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qxws\" (UniqueName: \"kubernetes.io/projected/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-kube-api-access-6qxws\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282796 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282824 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282885 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282955 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.282979 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.283024 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.388809 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389198 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389224 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389268 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389300 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389324 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389350 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389372 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389395 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qxws\" (UniqueName: \"kubernetes.io/projected/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-kube-api-access-6qxws\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389416 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.389440 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.391464 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.392721 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.392955 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.393230 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.393436 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.394628 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 
18:59:06.395761 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.401029 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.401575 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.408672 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.412437 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qxws\" (UniqueName: \"kubernetes.io/projected/4ba77f4b-156c-4d2e-9335-dab14bf1dcb3-kube-api-access-6qxws\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.437361 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.458932 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.527570 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fce93449-11d7-490f-9456-8f8667b9cb6d" path="/var/lib/kubelet/pods/fce93449-11d7-490f-9456-8f8667b9cb6d/volumes" Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.816774 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.830379 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b75489c6f-hsqm4"] Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.959857 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" event={"ID":"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f","Type":"ContainerStarted","Data":"aff9968ba95e2e903e28e86833f94d8ae8168f5f46b5d4dacd2682ffa186d107"} Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.963096 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0f43eed1-cea2-4621-8fec-f55587776177","Type":"ContainerStarted","Data":"4c36948272a5af4d2de4cd16dc4afff7ce3fc373b0fdd8c5b3b83a98e45bc287"} Dec 01 18:59:06 crc kubenswrapper[4935]: I1201 18:59:06.964348 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"428c8cdb-5fa3-4a5e-b249-1bb3713220a4","Type":"ContainerStarted","Data":"c43022378cf90ff11e6f77d8c9defaa583eda0c034fe0f190bcf269b580cd1c0"} Dec 01 18:59:07 crc kubenswrapper[4935]: I1201 18:59:07.020784 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 18:59:07 crc kubenswrapper[4935]: I1201 18:59:07.975599 4935 generic.go:334] "Generic (PLEG): container finished" podID="9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" containerID="f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f" exitCode=0 Dec 01 18:59:07 crc kubenswrapper[4935]: I1201 18:59:07.975655 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" event={"ID":"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f","Type":"ContainerDied","Data":"f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f"} Dec 01 18:59:07 crc kubenswrapper[4935]: I1201 18:59:07.978534 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0f43eed1-cea2-4621-8fec-f55587776177","Type":"ContainerStarted","Data":"07c3a79b379105d76c46a67a60be71b9202533fa55af5e11819537ef70493fb3"} Dec 01 18:59:07 crc kubenswrapper[4935]: I1201 18:59:07.980217 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3","Type":"ContainerStarted","Data":"b760c972c4fcd9d62134f49e18529857fe6da3394542106421cdabbbe8d0753b"} Dec 01 18:59:08 crc kubenswrapper[4935]: I1201 18:59:08.993537 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"428c8cdb-5fa3-4a5e-b249-1bb3713220a4","Type":"ContainerStarted","Data":"e2875c810d06f3da2905c641e378e98c672e923e586bdf7fce7cc026f1f182cb"} Dec 01 18:59:08 crc kubenswrapper[4935]: I1201 18:59:08.998251 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" event={"ID":"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f","Type":"ContainerStarted","Data":"7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032"} Dec 01 18:59:08 crc kubenswrapper[4935]: I1201 
18:59:08.998535 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:59:09 crc kubenswrapper[4935]: I1201 18:59:09.059940 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" podStartSLOduration=12.059920559 podStartE2EDuration="12.059920559s" podCreationTimestamp="2025-12-01 18:58:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:59:09.05299018 +0000 UTC m=+1763.074619439" watchObservedRunningTime="2025-12-01 18:59:09.059920559 +0000 UTC m=+1763.081549818" Dec 01 18:59:09 crc kubenswrapper[4935]: E1201 18:59:09.933731 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="0f43eed1-cea2-4621-8fec-f55587776177" Dec 01 18:59:10 crc kubenswrapper[4935]: I1201 18:59:10.020363 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0f43eed1-cea2-4621-8fec-f55587776177","Type":"ContainerStarted","Data":"df37918c9545d494bba00e31a53176356ada5767f40ec22d204da5a4a48a7660"} Dec 01 18:59:10 crc kubenswrapper[4935]: I1201 18:59:10.020796 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 18:59:10 crc kubenswrapper[4935]: E1201 18:59:10.021896 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested\\\"\"" pod="openstack/ceilometer-0" podUID="0f43eed1-cea2-4621-8fec-f55587776177" Dec 01 18:59:10 crc kubenswrapper[4935]: I1201 18:59:10.023556 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3","Type":"ContainerStarted","Data":"8b84f0c1d070b1126b2bf15075c6f071a5651defddc930cebf5fabdb23510bca"} Dec 01 18:59:11 crc kubenswrapper[4935]: E1201 18:59:11.066786 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested\\\"\"" pod="openstack/ceilometer-0" podUID="0f43eed1-cea2-4621-8fec-f55587776177" Dec 01 18:59:17 crc kubenswrapper[4935]: I1201 18:59:17.163135 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-6rt65" event={"ID":"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749","Type":"ContainerStarted","Data":"08aab24b030bba234bd9802bbe926ae9c03d99818aa74748d8eff364d6319485"} Dec 01 18:59:17 crc kubenswrapper[4935]: I1201 18:59:17.192713 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-6rt65" podStartSLOduration=1.854352676 podStartE2EDuration="39.192691401s" podCreationTimestamp="2025-12-01 18:58:38 +0000 UTC" firstStartedPulling="2025-12-01 18:58:39.407932763 +0000 UTC m=+1733.429562012" lastFinishedPulling="2025-12-01 18:59:16.746271478 +0000 UTC m=+1770.767900737" observedRunningTime="2025-12-01 18:59:17.182607073 +0000 UTC m=+1771.204236342" watchObservedRunningTime="2025-12-01 18:59:17.192691401 +0000 UTC 
m=+1771.214320670" Dec 01 18:59:17 crc kubenswrapper[4935]: I1201 18:59:17.827566 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:59:17 crc kubenswrapper[4935]: I1201 18:59:17.919723 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f84f9ccf-6ht5k"] Dec 01 18:59:17 crc kubenswrapper[4935]: I1201 18:59:17.920537 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" podUID="31708c3f-f9b6-44bd-8d81-cee4bc817f49" containerName="dnsmasq-dns" containerID="cri-o://a41aead54c79546f03894f72d0f8b7afbfd8b5220d945ac3bbb5b7fd7dbe5bbc" gracePeriod=10 Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.152908 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d75f767dc-nxgr9"] Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.155642 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.227199 4935 generic.go:334] "Generic (PLEG): container finished" podID="31708c3f-f9b6-44bd-8d81-cee4bc817f49" containerID="a41aead54c79546f03894f72d0f8b7afbfd8b5220d945ac3bbb5b7fd7dbe5bbc" exitCode=0 Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.227242 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" event={"ID":"31708c3f-f9b6-44bd-8d81-cee4bc817f49","Type":"ContainerDied","Data":"a41aead54c79546f03894f72d0f8b7afbfd8b5220d945ac3bbb5b7fd7dbe5bbc"} Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.258851 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d75f767dc-nxgr9"] Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.281549 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-dns-svc\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.281636 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-dns-swift-storage-0\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.281673 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.281730 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-ovsdbserver-sb\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.281754 4935 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-config\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.281827 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc2xd\" (UniqueName: \"kubernetes.io/projected/d4211a91-6935-4af5-8eb9-a3941c9b5293-kube-api-access-wc2xd\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.281852 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-ovsdbserver-nb\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.383466 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-dns-svc\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.383782 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-dns-swift-storage-0\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.383832 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.383917 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-ovsdbserver-sb\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.383940 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-config\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.384027 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc2xd\" (UniqueName: \"kubernetes.io/projected/d4211a91-6935-4af5-8eb9-a3941c9b5293-kube-api-access-wc2xd\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.384063 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-ovsdbserver-nb\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.384240 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-dns-svc\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.384780 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-ovsdbserver-sb\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.385138 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-ovsdbserver-nb\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.385316 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-dns-swift-storage-0\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.385844 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-config\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.385991 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d4211a91-6935-4af5-8eb9-a3941c9b5293-openstack-edpm-ipam\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.439868 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc2xd\" (UniqueName: \"kubernetes.io/projected/d4211a91-6935-4af5-8eb9-a3941c9b5293-kube-api-access-wc2xd\") pod \"dnsmasq-dns-5d75f767dc-nxgr9\" (UID: \"d4211a91-6935-4af5-8eb9-a3941c9b5293\") " pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.524691 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.690706 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.795402 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-sb\") pod \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.795488 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-svc\") pod \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.795548 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jt8n7\" (UniqueName: \"kubernetes.io/projected/31708c3f-f9b6-44bd-8d81-cee4bc817f49-kube-api-access-jt8n7\") pod \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.795605 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-swift-storage-0\") pod \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.795622 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-config\") pod \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.795722 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-nb\") pod \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.803169 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31708c3f-f9b6-44bd-8d81-cee4bc817f49-kube-api-access-jt8n7" (OuterVolumeSpecName: "kube-api-access-jt8n7") pod "31708c3f-f9b6-44bd-8d81-cee4bc817f49" (UID: "31708c3f-f9b6-44bd-8d81-cee4bc817f49"). InnerVolumeSpecName "kube-api-access-jt8n7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.892875 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "31708c3f-f9b6-44bd-8d81-cee4bc817f49" (UID: "31708c3f-f9b6-44bd-8d81-cee4bc817f49"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.898370 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jt8n7\" (UniqueName: \"kubernetes.io/projected/31708c3f-f9b6-44bd-8d81-cee4bc817f49-kube-api-access-jt8n7\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.898401 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.908523 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "31708c3f-f9b6-44bd-8d81-cee4bc817f49" (UID: "31708c3f-f9b6-44bd-8d81-cee4bc817f49"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.915308 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "31708c3f-f9b6-44bd-8d81-cee4bc817f49" (UID: "31708c3f-f9b6-44bd-8d81-cee4bc817f49"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:18 crc kubenswrapper[4935]: E1201 18:59:18.927037 4935 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-config podName:31708c3f-f9b6-44bd-8d81-cee4bc817f49 nodeName:}" failed. No retries permitted until 2025-12-01 18:59:19.427008774 +0000 UTC m=+1773.448638033 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config" (UniqueName: "kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-config") pod "31708c3f-f9b6-44bd-8d81-cee4bc817f49" (UID: "31708c3f-f9b6-44bd-8d81-cee4bc817f49") : error deleting /var/lib/kubelet/pods/31708c3f-f9b6-44bd-8d81-cee4bc817f49/volume-subpaths: remove /var/lib/kubelet/pods/31708c3f-f9b6-44bd-8d81-cee4bc817f49/volume-subpaths: no such file or directory Dec 01 18:59:18 crc kubenswrapper[4935]: I1201 18:59:18.927252 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "31708c3f-f9b6-44bd-8d81-cee4bc817f49" (UID: "31708c3f-f9b6-44bd-8d81-cee4bc817f49"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.001036 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.001104 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.001115 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.048238 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d75f767dc-nxgr9"] Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.237899 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" event={"ID":"d4211a91-6935-4af5-8eb9-a3941c9b5293","Type":"ContainerStarted","Data":"439be1daf7b2d18fdb2ed6b82012705a35ee76adc94a193b2a4eb46dcc066bf6"} Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.240751 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" event={"ID":"31708c3f-f9b6-44bd-8d81-cee4bc817f49","Type":"ContainerDied","Data":"c34269e63c0256e13f44f2a5051289d5603dfc7dc8d40bc02456bbea6441a514"} Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.240798 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f84f9ccf-6ht5k" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.240825 4935 scope.go:117] "RemoveContainer" containerID="a41aead54c79546f03894f72d0f8b7afbfd8b5220d945ac3bbb5b7fd7dbe5bbc" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.269556 4935 scope.go:117] "RemoveContainer" containerID="9bf5d108cd2b7f51042cd99fcfb70b422597ab82bc70fea22542ea046dafe726" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.507815 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:59:19 crc kubenswrapper[4935]: E1201 18:59:19.508311 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.511747 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-config\") pod \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\" (UID: \"31708c3f-f9b6-44bd-8d81-cee4bc817f49\") " Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.512177 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-config" (OuterVolumeSpecName: "config") pod "31708c3f-f9b6-44bd-8d81-cee4bc817f49" (UID: "31708c3f-f9b6-44bd-8d81-cee4bc817f49"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.512829 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31708c3f-f9b6-44bd-8d81-cee4bc817f49-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.579851 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f84f9ccf-6ht5k"] Dec 01 18:59:19 crc kubenswrapper[4935]: I1201 18:59:19.595033 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f84f9ccf-6ht5k"] Dec 01 18:59:20 crc kubenswrapper[4935]: I1201 18:59:20.257480 4935 generic.go:334] "Generic (PLEG): container finished" podID="d4211a91-6935-4af5-8eb9-a3941c9b5293" containerID="86afc26311ac86afe90c74961a241431ab83f5ec110eb4d0634df6bae9905845" exitCode=0 Dec 01 18:59:20 crc kubenswrapper[4935]: I1201 18:59:20.257563 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" event={"ID":"d4211a91-6935-4af5-8eb9-a3941c9b5293","Type":"ContainerDied","Data":"86afc26311ac86afe90c74961a241431ab83f5ec110eb4d0634df6bae9905845"} Dec 01 18:59:20 crc kubenswrapper[4935]: I1201 18:59:20.261709 4935 generic.go:334] "Generic (PLEG): container finished" podID="25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" containerID="08aab24b030bba234bd9802bbe926ae9c03d99818aa74748d8eff364d6319485" exitCode=0 Dec 01 18:59:20 crc kubenswrapper[4935]: I1201 18:59:20.261804 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-6rt65" event={"ID":"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749","Type":"ContainerDied","Data":"08aab24b030bba234bd9802bbe926ae9c03d99818aa74748d8eff364d6319485"} Dec 01 18:59:20 crc kubenswrapper[4935]: I1201 18:59:20.526481 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31708c3f-f9b6-44bd-8d81-cee4bc817f49" path="/var/lib/kubelet/pods/31708c3f-f9b6-44bd-8d81-cee4bc817f49/volumes" Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.287254 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" event={"ID":"d4211a91-6935-4af5-8eb9-a3941c9b5293","Type":"ContainerStarted","Data":"26e0e70b9c89293f44a2be9a23681d566d7dabe261772642701c5c34c57c98cf"} Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.287685 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.317483 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" podStartSLOduration=3.317454626 podStartE2EDuration="3.317454626s" podCreationTimestamp="2025-12-01 18:59:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:59:21.311793877 +0000 UTC m=+1775.333423146" watchObservedRunningTime="2025-12-01 18:59:21.317454626 +0000 UTC m=+1775.339083885" Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.777370 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-6rt65" Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.874474 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-config-data\") pod \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.874749 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-combined-ca-bundle\") pod \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.874869 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jljc9\" (UniqueName: \"kubernetes.io/projected/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-kube-api-access-jljc9\") pod \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\" (UID: \"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749\") " Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.885034 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-kube-api-access-jljc9" (OuterVolumeSpecName: "kube-api-access-jljc9") pod "25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" (UID: "25a03a1c-18c2-4f1d-96bf-cbb5bdae3749"). InnerVolumeSpecName "kube-api-access-jljc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.915370 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" (UID: "25a03a1c-18c2-4f1d-96bf-cbb5bdae3749"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.977770 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:21 crc kubenswrapper[4935]: I1201 18:59:21.978003 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jljc9\" (UniqueName: \"kubernetes.io/projected/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-kube-api-access-jljc9\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:22 crc kubenswrapper[4935]: I1201 18:59:22.013172 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-config-data" (OuterVolumeSpecName: "config-data") pod "25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" (UID: "25a03a1c-18c2-4f1d-96bf-cbb5bdae3749"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:22 crc kubenswrapper[4935]: I1201 18:59:22.080306 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:22 crc kubenswrapper[4935]: I1201 18:59:22.304951 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-6rt65" event={"ID":"25a03a1c-18c2-4f1d-96bf-cbb5bdae3749","Type":"ContainerDied","Data":"2cc760d49153b71e05c2665a3d3c9154be9012e40d690ad0d78e7e7a43c93ac0"} Dec 01 18:59:22 crc kubenswrapper[4935]: I1201 18:59:22.305014 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-6rt65" Dec 01 18:59:22 crc kubenswrapper[4935]: I1201 18:59:22.305029 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cc760d49153b71e05c2665a3d3c9154be9012e40d690ad0d78e7e7a43c93ac0" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.353248 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-6bfbdbbb99-fln9r"] Dec 01 18:59:23 crc kubenswrapper[4935]: E1201 18:59:23.354306 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" containerName="heat-db-sync" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.354328 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" containerName="heat-db-sync" Dec 01 18:59:23 crc kubenswrapper[4935]: E1201 18:59:23.354425 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31708c3f-f9b6-44bd-8d81-cee4bc817f49" containerName="dnsmasq-dns" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.354438 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="31708c3f-f9b6-44bd-8d81-cee4bc817f49" containerName="dnsmasq-dns" Dec 01 18:59:23 crc kubenswrapper[4935]: E1201 18:59:23.354471 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31708c3f-f9b6-44bd-8d81-cee4bc817f49" containerName="init" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.354484 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="31708c3f-f9b6-44bd-8d81-cee4bc817f49" containerName="init" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.354857 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" containerName="heat-db-sync" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.354891 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="31708c3f-f9b6-44bd-8d81-cee4bc817f49" containerName="dnsmasq-dns" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.356414 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.366900 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6bfbdbbb99-fln9r"] Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.472060 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-796bbb8f66-6s68g"] Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.474452 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.497626 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-796bbb8f66-6s68g"] Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.519636 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfdkg\" (UniqueName: \"kubernetes.io/projected/b5fc477d-3538-47ad-ae75-b9053d6eb06f-kube-api-access-cfdkg\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.519740 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5fc477d-3538-47ad-ae75-b9053d6eb06f-config-data-custom\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.519933 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5fc477d-3538-47ad-ae75-b9053d6eb06f-config-data\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.520137 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5fc477d-3538-47ad-ae75-b9053d6eb06f-combined-ca-bundle\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.521348 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-659767c5f-48fgm"] Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.528974 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.559287 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-659767c5f-48fgm"] Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.624188 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-combined-ca-bundle\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.624262 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-public-tls-certs\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.624317 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5fc477d-3538-47ad-ae75-b9053d6eb06f-config-data\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.624466 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5fc477d-3538-47ad-ae75-b9053d6eb06f-combined-ca-bundle\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.624514 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-combined-ca-bundle\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.625284 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzcxl\" (UniqueName: \"kubernetes.io/projected/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-kube-api-access-wzcxl\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.625437 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwzgn\" (UniqueName: \"kubernetes.io/projected/4695e9f4-6780-4105-a952-7e00df3e9f05-kube-api-access-lwzgn\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.625505 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-internal-tls-certs\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.625617 4935 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-config-data-custom\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.625682 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-config-data-custom\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.625727 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-internal-tls-certs\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.625779 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfdkg\" (UniqueName: \"kubernetes.io/projected/b5fc477d-3538-47ad-ae75-b9053d6eb06f-kube-api-access-cfdkg\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.625813 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-config-data\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.626503 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-config-data\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.626776 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5fc477d-3538-47ad-ae75-b9053d6eb06f-config-data-custom\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.627685 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-public-tls-certs\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.629934 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5fc477d-3538-47ad-ae75-b9053d6eb06f-combined-ca-bundle\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc 
kubenswrapper[4935]: I1201 18:59:23.630367 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5fc477d-3538-47ad-ae75-b9053d6eb06f-config-data\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.632742 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5fc477d-3538-47ad-ae75-b9053d6eb06f-config-data-custom\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.651818 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfdkg\" (UniqueName: \"kubernetes.io/projected/b5fc477d-3538-47ad-ae75-b9053d6eb06f-kube-api-access-cfdkg\") pod \"heat-engine-6bfbdbbb99-fln9r\" (UID: \"b5fc477d-3538-47ad-ae75-b9053d6eb06f\") " pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.712981 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.729691 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-public-tls-certs\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.729741 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-combined-ca-bundle\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.729770 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-public-tls-certs\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.729824 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-combined-ca-bundle\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.729849 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzcxl\" (UniqueName: \"kubernetes.io/projected/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-kube-api-access-wzcxl\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.729895 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwzgn\" (UniqueName: \"kubernetes.io/projected/4695e9f4-6780-4105-a952-7e00df3e9f05-kube-api-access-lwzgn\") pod 
\"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.729924 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-internal-tls-certs\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.729955 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-config-data-custom\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.729981 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-config-data-custom\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.730004 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-internal-tls-certs\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.730023 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-config-data\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.730060 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-config-data\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.734127 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-internal-tls-certs\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.734417 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-public-tls-certs\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.735092 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-config-data\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " 
pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.735512 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-combined-ca-bundle\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.737627 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-config-data-custom\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.738091 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-config-data\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.738814 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-public-tls-certs\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.745294 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-internal-tls-certs\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.745495 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4695e9f4-6780-4105-a952-7e00df3e9f05-config-data-custom\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.750008 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-combined-ca-bundle\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.755637 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzcxl\" (UniqueName: \"kubernetes.io/projected/557d94df-ef5e-4a1e-9f9e-df761e2d6cb2-kube-api-access-wzcxl\") pod \"heat-api-659767c5f-48fgm\" (UID: \"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2\") " pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.757455 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwzgn\" (UniqueName: \"kubernetes.io/projected/4695e9f4-6780-4105-a952-7e00df3e9f05-kube-api-access-lwzgn\") pod \"heat-cfnapi-796bbb8f66-6s68g\" (UID: \"4695e9f4-6780-4105-a952-7e00df3e9f05\") " pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.793999 4935 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:23 crc kubenswrapper[4935]: I1201 18:59:23.866599 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:24 crc kubenswrapper[4935]: I1201 18:59:24.202479 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6bfbdbbb99-fln9r"] Dec 01 18:59:24 crc kubenswrapper[4935]: I1201 18:59:24.352034 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6bfbdbbb99-fln9r" event={"ID":"b5fc477d-3538-47ad-ae75-b9053d6eb06f","Type":"ContainerStarted","Data":"355b7ce225fa9ca0a0f62fa855d4c3c5f21a454f5778071aa722cec48e613b61"} Dec 01 18:59:24 crc kubenswrapper[4935]: I1201 18:59:24.354468 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-796bbb8f66-6s68g"] Dec 01 18:59:24 crc kubenswrapper[4935]: W1201 18:59:24.479604 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod557d94df_ef5e_4a1e_9f9e_df761e2d6cb2.slice/crio-07b89b55c80e47389f2f776a21cb4f6d9be735982f6e85997af0059314c7e7e0 WatchSource:0}: Error finding container 07b89b55c80e47389f2f776a21cb4f6d9be735982f6e85997af0059314c7e7e0: Status 404 returned error can't find the container with id 07b89b55c80e47389f2f776a21cb4f6d9be735982f6e85997af0059314c7e7e0 Dec 01 18:59:24 crc kubenswrapper[4935]: I1201 18:59:24.487710 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-659767c5f-48fgm"] Dec 01 18:59:24 crc kubenswrapper[4935]: I1201 18:59:24.530748 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 18:59:25 crc kubenswrapper[4935]: I1201 18:59:25.362607 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-659767c5f-48fgm" event={"ID":"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2","Type":"ContainerStarted","Data":"07b89b55c80e47389f2f776a21cb4f6d9be735982f6e85997af0059314c7e7e0"} Dec 01 18:59:25 crc kubenswrapper[4935]: I1201 18:59:25.365380 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6bfbdbbb99-fln9r" event={"ID":"b5fc477d-3538-47ad-ae75-b9053d6eb06f","Type":"ContainerStarted","Data":"f1657fb357063768c1b4541c43b9a39a17fea342465ea2404014141fcea2a3a1"} Dec 01 18:59:25 crc kubenswrapper[4935]: I1201 18:59:25.366748 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:25 crc kubenswrapper[4935]: I1201 18:59:25.370751 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-796bbb8f66-6s68g" event={"ID":"4695e9f4-6780-4105-a952-7e00df3e9f05","Type":"ContainerStarted","Data":"0beeb831a54af3c1e0d8a7daaf02a498b0370cca85992002f066145d034646e8"} Dec 01 18:59:25 crc kubenswrapper[4935]: I1201 18:59:25.372965 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0f43eed1-cea2-4621-8fec-f55587776177","Type":"ContainerStarted","Data":"5e6ad1c62b5de0ab46f92bba30e8e74196cda551fd77f1175279faef7f41e0fa"} Dec 01 18:59:25 crc kubenswrapper[4935]: I1201 18:59:25.397246 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-6bfbdbbb99-fln9r" podStartSLOduration=2.397227144 podStartE2EDuration="2.397227144s" podCreationTimestamp="2025-12-01 18:59:23 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:59:25.382920323 +0000 UTC m=+1779.404549582" watchObservedRunningTime="2025-12-01 18:59:25.397227144 +0000 UTC m=+1779.418856403" Dec 01 18:59:25 crc kubenswrapper[4935]: I1201 18:59:25.412673 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.314740873 podStartE2EDuration="43.412653349s" podCreationTimestamp="2025-12-01 18:58:42 +0000 UTC" firstStartedPulling="2025-12-01 18:58:43.628782125 +0000 UTC m=+1737.650411384" lastFinishedPulling="2025-12-01 18:59:24.726694601 +0000 UTC m=+1778.748323860" observedRunningTime="2025-12-01 18:59:25.407335522 +0000 UTC m=+1779.428964781" watchObservedRunningTime="2025-12-01 18:59:25.412653349 +0000 UTC m=+1779.434282608" Dec 01 18:59:26 crc kubenswrapper[4935]: I1201 18:59:26.387668 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-796bbb8f66-6s68g" event={"ID":"4695e9f4-6780-4105-a952-7e00df3e9f05","Type":"ContainerStarted","Data":"b7954660c6e028149b6a7ea5cda73b9a827fcf397819f2a80f479398569931ca"} Dec 01 18:59:26 crc kubenswrapper[4935]: I1201 18:59:26.388296 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:26 crc kubenswrapper[4935]: I1201 18:59:26.389400 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-659767c5f-48fgm" event={"ID":"557d94df-ef5e-4a1e-9f9e-df761e2d6cb2","Type":"ContainerStarted","Data":"ee760119218c908d4ecd6cc15b06e8bad8ac65fc7aee90272e204c56519d002f"} Dec 01 18:59:26 crc kubenswrapper[4935]: I1201 18:59:26.422292 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-796bbb8f66-6s68g" podStartSLOduration=1.955664025 podStartE2EDuration="3.422270874s" podCreationTimestamp="2025-12-01 18:59:23 +0000 UTC" firstStartedPulling="2025-12-01 18:59:24.338755161 +0000 UTC m=+1778.360384440" lastFinishedPulling="2025-12-01 18:59:25.80536204 +0000 UTC m=+1779.826991289" observedRunningTime="2025-12-01 18:59:26.404518505 +0000 UTC m=+1780.426147764" watchObservedRunningTime="2025-12-01 18:59:26.422270874 +0000 UTC m=+1780.443900123" Dec 01 18:59:26 crc kubenswrapper[4935]: I1201 18:59:26.451325 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-659767c5f-48fgm" podStartSLOduration=2.124690439 podStartE2EDuration="3.451304658s" podCreationTimestamp="2025-12-01 18:59:23 +0000 UTC" firstStartedPulling="2025-12-01 18:59:24.482795799 +0000 UTC m=+1778.504425048" lastFinishedPulling="2025-12-01 18:59:25.809410008 +0000 UTC m=+1779.831039267" observedRunningTime="2025-12-01 18:59:26.428796759 +0000 UTC m=+1780.450426018" watchObservedRunningTime="2025-12-01 18:59:26.451304658 +0000 UTC m=+1780.472933917" Dec 01 18:59:27 crc kubenswrapper[4935]: I1201 18:59:27.404539 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:28 crc kubenswrapper[4935]: I1201 18:59:28.531015 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d75f767dc-nxgr9" Dec 01 18:59:28 crc kubenswrapper[4935]: I1201 18:59:28.608853 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b75489c6f-hsqm4"] Dec 01 18:59:28 crc kubenswrapper[4935]: I1201 18:59:28.611542 4935 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" podUID="9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" containerName="dnsmasq-dns" containerID="cri-o://7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032" gracePeriod=10 Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.211614 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.292347 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-sb\") pod \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.292607 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dbd5\" (UniqueName: \"kubernetes.io/projected/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-kube-api-access-2dbd5\") pod \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.292736 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-nb\") pod \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.293008 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-openstack-edpm-ipam\") pod \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.293109 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-config\") pod \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.293287 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-svc\") pod \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.293383 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-swift-storage-0\") pod \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.324120 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-kube-api-access-2dbd5" (OuterVolumeSpecName: "kube-api-access-2dbd5") pod "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" (UID: "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f"). InnerVolumeSpecName "kube-api-access-2dbd5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.367018 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" (UID: "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.383095 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" (UID: "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.386386 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-config" (OuterVolumeSpecName: "config") pod "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" (UID: "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.397205 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" (UID: "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.401066 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-svc\") pod \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\" (UID: \"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f\") " Dec 01 18:59:29 crc kubenswrapper[4935]: W1201 18:59:29.401217 4935 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f/volumes/kubernetes.io~configmap/dns-svc Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.401243 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" (UID: "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.402537 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dbd5\" (UniqueName: \"kubernetes.io/projected/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-kube-api-access-2dbd5\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.402560 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.402682 4935 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-config\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.402713 4935 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.402822 4935 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.406786 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" (UID: "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.421334 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" (UID: "9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.430131 4935 generic.go:334] "Generic (PLEG): container finished" podID="9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" containerID="7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032" exitCode=0 Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.430226 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" event={"ID":"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f","Type":"ContainerDied","Data":"7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032"} Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.430256 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" event={"ID":"9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f","Type":"ContainerDied","Data":"aff9968ba95e2e903e28e86833f94d8ae8168f5f46b5d4dacd2682ffa186d107"} Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.430275 4935 scope.go:117] "RemoveContainer" containerID="7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.430434 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b75489c6f-hsqm4" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.462607 4935 scope.go:117] "RemoveContainer" containerID="f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.477085 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b75489c6f-hsqm4"] Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.485379 4935 scope.go:117] "RemoveContainer" containerID="7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032" Dec 01 18:59:29 crc kubenswrapper[4935]: E1201 18:59:29.486017 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032\": container with ID starting with 7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032 not found: ID does not exist" containerID="7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.486129 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032"} err="failed to get container status \"7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032\": rpc error: code = NotFound desc = could not find container \"7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032\": container with ID starting with 7d78b4548013b63934ba27283ac36b7050b1694c0d0d9b810f2a0322e4850032 not found: ID does not exist" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.486266 4935 scope.go:117] "RemoveContainer" containerID="f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f" Dec 01 18:59:29 crc kubenswrapper[4935]: E1201 18:59:29.486666 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f\": container with ID starting with f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f not found: ID does not exist" containerID="f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.486777 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f"} err="failed to get container status \"f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f\": rpc error: code = NotFound desc = could not find container \"f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f\": container with ID starting with f306b1f96e00ed07f64e5921a09db47f368b0efed302979522d909a057c7054f not found: ID does not exist" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.487420 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b75489c6f-hsqm4"] Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.505504 4935 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:29 crc kubenswrapper[4935]: I1201 18:59:29.505536 4935 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:30 crc kubenswrapper[4935]: I1201 18:59:30.510086 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:59:30 crc kubenswrapper[4935]: E1201 18:59:30.510399 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:59:30 crc kubenswrapper[4935]: I1201 18:59:30.529671 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" path="/var/lib/kubelet/pods/9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f/volumes" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.168511 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl"] Dec 01 18:59:33 crc kubenswrapper[4935]: E1201 18:59:33.169603 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" containerName="init" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.169619 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" containerName="init" Dec 01 18:59:33 crc kubenswrapper[4935]: E1201 18:59:33.169631 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" containerName="dnsmasq-dns" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.169639 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" containerName="dnsmasq-dns" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.170026 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f4a0d24-ec1c-48a6-8f86-66e7ad685d2f" containerName="dnsmasq-dns" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.171215 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.174059 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.174205 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.174302 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.174388 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.183734 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl"] Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.317962 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.318180 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.318264 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.318309 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pttsg\" (UniqueName: \"kubernetes.io/projected/499c1a41-8227-4cf9-8c15-99fd4a46f013-kube-api-access-pttsg\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.421048 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.421123 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pttsg\" (UniqueName: 
\"kubernetes.io/projected/499c1a41-8227-4cf9-8c15-99fd4a46f013-kube-api-access-pttsg\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.421315 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.421430 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.427859 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.428052 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.428883 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.444727 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pttsg\" (UniqueName: \"kubernetes.io/projected/499c1a41-8227-4cf9-8c15-99fd4a46f013-kube-api-access-pttsg\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:33 crc kubenswrapper[4935]: I1201 18:59:33.498470 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:34 crc kubenswrapper[4935]: I1201 18:59:34.261676 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl"] Dec 01 18:59:34 crc kubenswrapper[4935]: I1201 18:59:34.553993 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" event={"ID":"499c1a41-8227-4cf9-8c15-99fd4a46f013","Type":"ContainerStarted","Data":"1d16d61b026b34fa30c9ae316e87f7bc822a708781b2121e5851f338b4e4ad46"} Dec 01 18:59:35 crc kubenswrapper[4935]: I1201 18:59:35.228268 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-796bbb8f66-6s68g" Dec 01 18:59:35 crc kubenswrapper[4935]: I1201 18:59:35.315541 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-558b54d8fc-h7892"] Dec 01 18:59:35 crc kubenswrapper[4935]: I1201 18:59:35.317316 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-cfnapi-558b54d8fc-h7892" podUID="49f9e82b-286f-42b5-b006-5fe38a758159" containerName="heat-cfnapi" containerID="cri-o://87eedf06818ffbee7cca3a4762e8e3abf4ca9b45d83938b0b4af5c2111cf6ab6" gracePeriod=60 Dec 01 18:59:35 crc kubenswrapper[4935]: I1201 18:59:35.485747 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-659767c5f-48fgm" Dec 01 18:59:35 crc kubenswrapper[4935]: I1201 18:59:35.570336 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-f77cd874b-zfsrx"] Dec 01 18:59:35 crc kubenswrapper[4935]: I1201 18:59:35.570648 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-api-f77cd874b-zfsrx" podUID="cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" containerName="heat-api" containerID="cri-o://48e6e8c486a39d490586dc18a525bd27d5957a99d7e1e8ee9b4281613c2a6d32" gracePeriod=60 Dec 01 18:59:39 crc kubenswrapper[4935]: I1201 18:59:39.188469 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-api-f77cd874b-zfsrx" podUID="cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" containerName="heat-api" probeResult="failure" output="Get \"https://10.217.0.215:8004/healthcheck\": dial tcp 10.217.0.215:8004: connect: connection refused" Dec 01 18:59:39 crc kubenswrapper[4935]: I1201 18:59:39.208571 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-cfnapi-558b54d8fc-h7892" podUID="49f9e82b-286f-42b5-b006-5fe38a758159" containerName="heat-cfnapi" probeResult="failure" output="Get \"https://10.217.0.216:8000/healthcheck\": dial tcp 10.217.0.216:8000: connect: connection refused" Dec 01 18:59:39 crc kubenswrapper[4935]: I1201 18:59:39.617440 4935 generic.go:334] "Generic (PLEG): container finished" podID="49f9e82b-286f-42b5-b006-5fe38a758159" containerID="87eedf06818ffbee7cca3a4762e8e3abf4ca9b45d83938b0b4af5c2111cf6ab6" exitCode=0 Dec 01 18:59:39 crc kubenswrapper[4935]: I1201 18:59:39.617512 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-558b54d8fc-h7892" event={"ID":"49f9e82b-286f-42b5-b006-5fe38a758159","Type":"ContainerDied","Data":"87eedf06818ffbee7cca3a4762e8e3abf4ca9b45d83938b0b4af5c2111cf6ab6"} Dec 01 18:59:39 crc kubenswrapper[4935]: I1201 18:59:39.619008 4935 generic.go:334] "Generic (PLEG): container finished" podID="cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" 
containerID="48e6e8c486a39d490586dc18a525bd27d5957a99d7e1e8ee9b4281613c2a6d32" exitCode=0 Dec 01 18:59:39 crc kubenswrapper[4935]: I1201 18:59:39.619036 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-f77cd874b-zfsrx" event={"ID":"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa","Type":"ContainerDied","Data":"48e6e8c486a39d490586dc18a525bd27d5957a99d7e1e8ee9b4281613c2a6d32"} Dec 01 18:59:41 crc kubenswrapper[4935]: I1201 18:59:41.654610 4935 generic.go:334] "Generic (PLEG): container finished" podID="428c8cdb-5fa3-4a5e-b249-1bb3713220a4" containerID="e2875c810d06f3da2905c641e378e98c672e923e586bdf7fce7cc026f1f182cb" exitCode=0 Dec 01 18:59:41 crc kubenswrapper[4935]: I1201 18:59:41.654760 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"428c8cdb-5fa3-4a5e-b249-1bb3713220a4","Type":"ContainerDied","Data":"e2875c810d06f3da2905c641e378e98c672e923e586bdf7fce7cc026f1f182cb"} Dec 01 18:59:42 crc kubenswrapper[4935]: I1201 18:59:42.510121 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:59:42 crc kubenswrapper[4935]: E1201 18:59:42.510589 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:59:42 crc kubenswrapper[4935]: I1201 18:59:42.665975 4935 generic.go:334] "Generic (PLEG): container finished" podID="4ba77f4b-156c-4d2e-9335-dab14bf1dcb3" containerID="8b84f0c1d070b1126b2bf15075c6f071a5651defddc930cebf5fabdb23510bca" exitCode=0 Dec 01 18:59:42 crc kubenswrapper[4935]: I1201 18:59:42.666017 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3","Type":"ContainerDied","Data":"8b84f0c1d070b1126b2bf15075c6f071a5651defddc930cebf5fabdb23510bca"} Dec 01 18:59:43 crc kubenswrapper[4935]: I1201 18:59:43.785552 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-6bfbdbbb99-fln9r" Dec 01 18:59:43 crc kubenswrapper[4935]: I1201 18:59:43.858188 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-69cfbb4c64-24bqh"] Dec 01 18:59:43 crc kubenswrapper[4935]: I1201 18:59:43.858458 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-engine-69cfbb4c64-24bqh" podUID="ce0e14b6-6e7f-467a-bad9-9479311d6c89" containerName="heat-engine" containerID="cri-o://a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680" gracePeriod=60 Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.268247 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.339063 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c65ql\" (UniqueName: \"kubernetes.io/projected/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-kube-api-access-c65ql\") pod \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.339107 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data\") pod \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.339302 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data-custom\") pod \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.339407 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-public-tls-certs\") pod \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.339527 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-internal-tls-certs\") pod \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.339564 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-combined-ca-bundle\") pod \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\" (UID: \"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.350267 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" (UID: "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.353799 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-kube-api-access-c65ql" (OuterVolumeSpecName: "kube-api-access-c65ql") pod "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" (UID: "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa"). InnerVolumeSpecName "kube-api-access-c65ql". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.366341 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.374000 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" (UID: "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.440921 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" (UID: "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.441255 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-combined-ca-bundle\") pod \"49f9e82b-286f-42b5-b006-5fe38a758159\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.441296 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data\") pod \"49f9e82b-286f-42b5-b006-5fe38a758159\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.441335 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-public-tls-certs\") pod \"49f9e82b-286f-42b5-b006-5fe38a758159\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.441420 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxdzc\" (UniqueName: \"kubernetes.io/projected/49f9e82b-286f-42b5-b006-5fe38a758159-kube-api-access-qxdzc\") pod \"49f9e82b-286f-42b5-b006-5fe38a758159\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.441467 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-internal-tls-certs\") pod \"49f9e82b-286f-42b5-b006-5fe38a758159\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.441579 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data-custom\") pod \"49f9e82b-286f-42b5-b006-5fe38a758159\" (UID: \"49f9e82b-286f-42b5-b006-5fe38a758159\") " Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.442230 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data" (OuterVolumeSpecName: "config-data") pod "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" (UID: "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.445335 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "49f9e82b-286f-42b5-b006-5fe38a758159" (UID: "49f9e82b-286f-42b5-b006-5fe38a758159"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.448261 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c65ql\" (UniqueName: \"kubernetes.io/projected/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-kube-api-access-c65ql\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.448346 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.448403 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.448467 4935 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.448522 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.448573 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.448313 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49f9e82b-286f-42b5-b006-5fe38a758159-kube-api-access-qxdzc" (OuterVolumeSpecName: "kube-api-access-qxdzc") pod "49f9e82b-286f-42b5-b006-5fe38a758159" (UID: "49f9e82b-286f-42b5-b006-5fe38a758159"). InnerVolumeSpecName "kube-api-access-qxdzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.484514 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49f9e82b-286f-42b5-b006-5fe38a758159" (UID: "49f9e82b-286f-42b5-b006-5fe38a758159"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.492304 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" (UID: "cf534e24-32d5-4b58-89d6-ddb49b1fc8aa"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.517771 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "49f9e82b-286f-42b5-b006-5fe38a758159" (UID: "49f9e82b-286f-42b5-b006-5fe38a758159"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.544125 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data" (OuterVolumeSpecName: "config-data") pod "49f9e82b-286f-42b5-b006-5fe38a758159" (UID: "49f9e82b-286f-42b5-b006-5fe38a758159"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.547279 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "49f9e82b-286f-42b5-b006-5fe38a758159" (UID: "49f9e82b-286f-42b5-b006-5fe38a758159"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.552118 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxdzc\" (UniqueName: \"kubernetes.io/projected/49f9e82b-286f-42b5-b006-5fe38a758159-kube-api-access-qxdzc\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.552292 4935 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.552308 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.552319 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.552333 4935 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49f9e82b-286f-42b5-b006-5fe38a758159-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.552351 4935 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.694392 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"428c8cdb-5fa3-4a5e-b249-1bb3713220a4","Type":"ContainerStarted","Data":"09f48d357b4147199a42713c46398accb8c806c9b6e0a8b9543704ce14c6ecdd"} Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.694604 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.696537 4935 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4ba77f4b-156c-4d2e-9335-dab14bf1dcb3","Type":"ContainerStarted","Data":"6e05ec741c6464c0b3a7568159f631e493f1b06d97051994271cdfe64f9cded7"} Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.696691 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.698474 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-f77cd874b-zfsrx" event={"ID":"cf534e24-32d5-4b58-89d6-ddb49b1fc8aa","Type":"ContainerDied","Data":"4e0522e890498c4bc375602de219f821322a04749380fc34680969e60e9d0dfb"} Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.698518 4935 scope.go:117] "RemoveContainer" containerID="48e6e8c486a39d490586dc18a525bd27d5957a99d7e1e8ee9b4281613c2a6d32" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.698530 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-f77cd874b-zfsrx" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.701824 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" event={"ID":"499c1a41-8227-4cf9-8c15-99fd4a46f013","Type":"ContainerStarted","Data":"9f1cfd343dfed32bfbcdd7865b095ae3e15138f1e4f2f3413cf140aa04026df2"} Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.707807 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-558b54d8fc-h7892" event={"ID":"49f9e82b-286f-42b5-b006-5fe38a758159","Type":"ContainerDied","Data":"e065b4250a439250a2e18b3f86cf4428ab104ba19e435ea7905ca56abd38308f"} Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.707963 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-558b54d8fc-h7892" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.731859 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=51.731842509 podStartE2EDuration="51.731842509s" podCreationTimestamp="2025-12-01 18:58:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:59:44.717563119 +0000 UTC m=+1798.739192378" watchObservedRunningTime="2025-12-01 18:59:44.731842509 +0000 UTC m=+1798.753471768" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.754470 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.754448241 podStartE2EDuration="38.754448241s" podCreationTimestamp="2025-12-01 18:59:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 18:59:44.749126793 +0000 UTC m=+1798.770756062" watchObservedRunningTime="2025-12-01 18:59:44.754448241 +0000 UTC m=+1798.776077490" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.767475 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" podStartSLOduration=2.146460058 podStartE2EDuration="11.767459321s" podCreationTimestamp="2025-12-01 18:59:33 +0000 UTC" firstStartedPulling="2025-12-01 18:59:34.258138044 +0000 UTC m=+1788.279767293" lastFinishedPulling="2025-12-01 18:59:43.879137297 +0000 UTC m=+1797.900766556" observedRunningTime="2025-12-01 18:59:44.764609111 +0000 UTC m=+1798.786238370" watchObservedRunningTime="2025-12-01 18:59:44.767459321 +0000 UTC m=+1798.789088580" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.841747 4935 scope.go:117] "RemoveContainer" containerID="87eedf06818ffbee7cca3a4762e8e3abf4ca9b45d83938b0b4af5c2111cf6ab6" Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.851214 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-f77cd874b-zfsrx"] Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.863361 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-f77cd874b-zfsrx"] Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.875515 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-558b54d8fc-h7892"] Dec 01 18:59:44 crc kubenswrapper[4935]: I1201 18:59:44.886163 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-558b54d8fc-h7892"] Dec 01 18:59:46 crc kubenswrapper[4935]: E1201 18:59:46.393552 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:59:46 crc kubenswrapper[4935]: E1201 18:59:46.395239 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:59:46 crc kubenswrapper[4935]: E1201 18:59:46.397041 4935 log.go:32] "ExecSync cmd from 
runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:59:46 crc kubenswrapper[4935]: E1201 18:59:46.397102 4935 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-69cfbb4c64-24bqh" podUID="ce0e14b6-6e7f-467a-bad9-9479311d6c89" containerName="heat-engine" Dec 01 18:59:46 crc kubenswrapper[4935]: I1201 18:59:46.524631 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49f9e82b-286f-42b5-b006-5fe38a758159" path="/var/lib/kubelet/pods/49f9e82b-286f-42b5-b006-5fe38a758159/volumes" Dec 01 18:59:46 crc kubenswrapper[4935]: I1201 18:59:46.527131 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" path="/var/lib/kubelet/pods/cf534e24-32d5-4b58-89d6-ddb49b1fc8aa/volumes" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.606194 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-8dp9s"] Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.622048 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-8dp9s"] Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.704900 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-qm9b2"] Dec 01 18:59:48 crc kubenswrapper[4935]: E1201 18:59:48.705417 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" containerName="heat-api" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.705433 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" containerName="heat-api" Dec 01 18:59:48 crc kubenswrapper[4935]: E1201 18:59:48.705447 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f9e82b-286f-42b5-b006-5fe38a758159" containerName="heat-cfnapi" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.705452 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f9e82b-286f-42b5-b006-5fe38a758159" containerName="heat-cfnapi" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.705666 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f9e82b-286f-42b5-b006-5fe38a758159" containerName="heat-cfnapi" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.705687 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" containerName="heat-api" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.706436 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.708908 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.717953 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-qm9b2"] Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.773716 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-config-data\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.773983 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg4jr\" (UniqueName: \"kubernetes.io/projected/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-kube-api-access-pg4jr\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.774412 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-combined-ca-bundle\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.774572 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-scripts\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.877014 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-config-data\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.877103 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg4jr\" (UniqueName: \"kubernetes.io/projected/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-kube-api-access-pg4jr\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.877225 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-combined-ca-bundle\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.877279 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-scripts\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.895783 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-scripts\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.899683 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg4jr\" (UniqueName: \"kubernetes.io/projected/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-kube-api-access-pg4jr\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.901177 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-config-data\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:48 crc kubenswrapper[4935]: I1201 18:59:48.901521 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-combined-ca-bundle\") pod \"aodh-db-sync-qm9b2\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:49 crc kubenswrapper[4935]: I1201 18:59:49.023030 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-qm9b2" Dec 01 18:59:49 crc kubenswrapper[4935]: I1201 18:59:49.574023 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-qm9b2"] Dec 01 18:59:49 crc kubenswrapper[4935]: I1201 18:59:49.762573 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-qm9b2" event={"ID":"a02039c1-bb22-4f93-99ff-e9d4bead1b9c","Type":"ContainerStarted","Data":"30fea187069ae1985d47e80993b056be25a3d5531a344a2058ba2bb6ff51f420"} Dec 01 18:59:50 crc kubenswrapper[4935]: I1201 18:59:50.531390 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d9754a9-f6c3-405b-ad06-70e432e7eedc" path="/var/lib/kubelet/pods/2d9754a9-f6c3-405b-ad06-70e432e7eedc/volumes" Dec 01 18:59:54 crc kubenswrapper[4935]: I1201 18:59:54.195928 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-api-f77cd874b-zfsrx" podUID="cf534e24-32d5-4b58-89d6-ddb49b1fc8aa" containerName="heat-api" probeResult="failure" output="Get \"https://10.217.0.215:8004/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 01 18:59:54 crc kubenswrapper[4935]: I1201 18:59:54.208919 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/heat-cfnapi-558b54d8fc-h7892" podUID="49f9e82b-286f-42b5-b006-5fe38a758159" containerName="heat-cfnapi" probeResult="failure" output="Get \"https://10.217.0.216:8000/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 01 18:59:55 crc kubenswrapper[4935]: I1201 18:59:55.508507 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 18:59:55 crc kubenswrapper[4935]: E1201 18:59:55.509028 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 18:59:56 crc kubenswrapper[4935]: E1201 18:59:56.392710 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:59:56 crc kubenswrapper[4935]: E1201 18:59:56.394169 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:59:56 crc kubenswrapper[4935]: E1201 18:59:56.395344 4935 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 01 18:59:56 crc kubenswrapper[4935]: E1201 18:59:56.395380 4935 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-69cfbb4c64-24bqh" podUID="ce0e14b6-6e7f-467a-bad9-9479311d6c89" containerName="heat-engine" Dec 01 18:59:56 crc kubenswrapper[4935]: I1201 18:59:56.463393 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 01 18:59:56 crc kubenswrapper[4935]: I1201 18:59:56.864533 4935 generic.go:334] "Generic (PLEG): container finished" podID="499c1a41-8227-4cf9-8c15-99fd4a46f013" containerID="9f1cfd343dfed32bfbcdd7865b095ae3e15138f1e4f2f3413cf140aa04026df2" exitCode=0 Dec 01 18:59:56 crc kubenswrapper[4935]: I1201 18:59:56.864909 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" event={"ID":"499c1a41-8227-4cf9-8c15-99fd4a46f013","Type":"ContainerDied","Data":"9f1cfd343dfed32bfbcdd7865b095ae3e15138f1e4f2f3413cf140aa04026df2"} Dec 01 18:59:57 crc kubenswrapper[4935]: I1201 18:59:57.878842 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-qm9b2" event={"ID":"a02039c1-bb22-4f93-99ff-e9d4bead1b9c","Type":"ContainerStarted","Data":"f97c8d7bf3e01b4a6e801943fb78ce5c8aa71110c35ad516cef44d8c11b52b40"} Dec 01 18:59:57 crc kubenswrapper[4935]: I1201 18:59:57.903955 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-qm9b2" podStartSLOduration=2.45576834 podStartE2EDuration="9.903937606s" podCreationTimestamp="2025-12-01 18:59:48 +0000 UTC" firstStartedPulling="2025-12-01 18:59:49.574297982 +0000 UTC m=+1803.595927241" lastFinishedPulling="2025-12-01 18:59:57.022467208 +0000 UTC m=+1811.044096507" observedRunningTime="2025-12-01 18:59:57.894603592 +0000 UTC m=+1811.916232851" watchObservedRunningTime="2025-12-01 18:59:57.903937606 +0000 UTC m=+1811.925566865" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.433646 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.531703 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-repo-setup-combined-ca-bundle\") pod \"499c1a41-8227-4cf9-8c15-99fd4a46f013\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.531949 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pttsg\" (UniqueName: \"kubernetes.io/projected/499c1a41-8227-4cf9-8c15-99fd4a46f013-kube-api-access-pttsg\") pod \"499c1a41-8227-4cf9-8c15-99fd4a46f013\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.532129 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-inventory\") pod \"499c1a41-8227-4cf9-8c15-99fd4a46f013\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.532187 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-ssh-key\") pod \"499c1a41-8227-4cf9-8c15-99fd4a46f013\" (UID: \"499c1a41-8227-4cf9-8c15-99fd4a46f013\") " Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.539292 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "499c1a41-8227-4cf9-8c15-99fd4a46f013" (UID: "499c1a41-8227-4cf9-8c15-99fd4a46f013"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.552462 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/499c1a41-8227-4cf9-8c15-99fd4a46f013-kube-api-access-pttsg" (OuterVolumeSpecName: "kube-api-access-pttsg") pod "499c1a41-8227-4cf9-8c15-99fd4a46f013" (UID: "499c1a41-8227-4cf9-8c15-99fd4a46f013"). InnerVolumeSpecName "kube-api-access-pttsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.566677 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-inventory" (OuterVolumeSpecName: "inventory") pod "499c1a41-8227-4cf9-8c15-99fd4a46f013" (UID: "499c1a41-8227-4cf9-8c15-99fd4a46f013"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.570746 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "499c1a41-8227-4cf9-8c15-99fd4a46f013" (UID: "499c1a41-8227-4cf9-8c15-99fd4a46f013"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.634951 4935 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.634985 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pttsg\" (UniqueName: \"kubernetes.io/projected/499c1a41-8227-4cf9-8c15-99fd4a46f013-kube-api-access-pttsg\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.634995 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.635003 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499c1a41-8227-4cf9-8c15-99fd4a46f013-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.894720 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.894883 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl" event={"ID":"499c1a41-8227-4cf9-8c15-99fd4a46f013","Type":"ContainerDied","Data":"1d16d61b026b34fa30c9ae316e87f7bc822a708781b2121e5851f338b4e4ad46"} Dec 01 18:59:58 crc kubenswrapper[4935]: I1201 18:59:58.896867 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d16d61b026b34fa30c9ae316e87f7bc822a708781b2121e5851f338b4e4ad46" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.040806 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2"] Dec 01 18:59:59 crc kubenswrapper[4935]: E1201 18:59:59.041730 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="499c1a41-8227-4cf9-8c15-99fd4a46f013" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.041903 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="499c1a41-8227-4cf9-8c15-99fd4a46f013" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.042325 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="499c1a41-8227-4cf9-8c15-99fd4a46f013" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.043450 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.046035 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.046478 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.047889 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.048075 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.052278 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2"] Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.163052 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phr5w\" (UniqueName: \"kubernetes.io/projected/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-kube-api-access-phr5w\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-n5wm2\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.163206 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-n5wm2\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.163240 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-n5wm2\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.265586 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phr5w\" (UniqueName: \"kubernetes.io/projected/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-kube-api-access-phr5w\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-n5wm2\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.265722 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-n5wm2\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.265773 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-n5wm2\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.270936 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-n5wm2\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.276105 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-n5wm2\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.290195 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phr5w\" (UniqueName: \"kubernetes.io/projected/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-kube-api-access-phr5w\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-n5wm2\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.370880 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.909073 4935 generic.go:334] "Generic (PLEG): container finished" podID="a02039c1-bb22-4f93-99ff-e9d4bead1b9c" containerID="f97c8d7bf3e01b4a6e801943fb78ce5c8aa71110c35ad516cef44d8c11b52b40" exitCode=0 Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.909185 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-qm9b2" event={"ID":"a02039c1-bb22-4f93-99ff-e9d4bead1b9c","Type":"ContainerDied","Data":"f97c8d7bf3e01b4a6e801943fb78ce5c8aa71110c35ad516cef44d8c11b52b40"} Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.913781 4935 generic.go:334] "Generic (PLEG): container finished" podID="ce0e14b6-6e7f-467a-bad9-9479311d6c89" containerID="a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680" exitCode=0 Dec 01 18:59:59 crc kubenswrapper[4935]: I1201 18:59:59.913831 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-69cfbb4c64-24bqh" event={"ID":"ce0e14b6-6e7f-467a-bad9-9479311d6c89","Type":"ContainerDied","Data":"a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680"} Dec 01 19:00:00 crc kubenswrapper[4935]: W1201 19:00:00.061177 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6b95e39_a3e1_4e74_9e30_6c29a6aa8096.slice/crio-fac517e2862c50f07191dd54a91658bae60ba9c76271f9893d7e814adf615698 WatchSource:0}: Error finding container fac517e2862c50f07191dd54a91658bae60ba9c76271f9893d7e814adf615698: Status 404 returned error can't find the container with id fac517e2862c50f07191dd54a91658bae60ba9c76271f9893d7e814adf615698 Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.067093 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2"] Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.139252 4935 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf"] Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.140882 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.143272 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.144015 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.163879 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf"] Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.167067 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.300284 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data\") pod \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.300514 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6m597\" (UniqueName: \"kubernetes.io/projected/ce0e14b6-6e7f-467a-bad9-9479311d6c89-kube-api-access-6m597\") pod \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.300592 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data-custom\") pod \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.300644 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-combined-ca-bundle\") pod \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\" (UID: \"ce0e14b6-6e7f-467a-bad9-9479311d6c89\") " Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.301282 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec6a2572-33e9-4baf-965e-dc529220bd30-config-volume\") pod \"collect-profiles-29410260-q2wwf\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.301452 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdrnn\" (UniqueName: \"kubernetes.io/projected/ec6a2572-33e9-4baf-965e-dc529220bd30-kube-api-access-hdrnn\") pod \"collect-profiles-29410260-q2wwf\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.301492 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec6a2572-33e9-4baf-965e-dc529220bd30-secret-volume\") pod \"collect-profiles-29410260-q2wwf\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.306228 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ce0e14b6-6e7f-467a-bad9-9479311d6c89" (UID: "ce0e14b6-6e7f-467a-bad9-9479311d6c89"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.308883 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce0e14b6-6e7f-467a-bad9-9479311d6c89-kube-api-access-6m597" (OuterVolumeSpecName: "kube-api-access-6m597") pod "ce0e14b6-6e7f-467a-bad9-9479311d6c89" (UID: "ce0e14b6-6e7f-467a-bad9-9479311d6c89"). InnerVolumeSpecName "kube-api-access-6m597". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.347560 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce0e14b6-6e7f-467a-bad9-9479311d6c89" (UID: "ce0e14b6-6e7f-467a-bad9-9479311d6c89"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.370382 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data" (OuterVolumeSpecName: "config-data") pod "ce0e14b6-6e7f-467a-bad9-9479311d6c89" (UID: "ce0e14b6-6e7f-467a-bad9-9479311d6c89"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.403247 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdrnn\" (UniqueName: \"kubernetes.io/projected/ec6a2572-33e9-4baf-965e-dc529220bd30-kube-api-access-hdrnn\") pod \"collect-profiles-29410260-q2wwf\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.403497 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec6a2572-33e9-4baf-965e-dc529220bd30-secret-volume\") pod \"collect-profiles-29410260-q2wwf\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.403653 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec6a2572-33e9-4baf-965e-dc529220bd30-config-volume\") pod \"collect-profiles-29410260-q2wwf\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.403727 4935 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.403738 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.403749 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce0e14b6-6e7f-467a-bad9-9479311d6c89-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.403758 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6m597\" (UniqueName: \"kubernetes.io/projected/ce0e14b6-6e7f-467a-bad9-9479311d6c89-kube-api-access-6m597\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.404512 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec6a2572-33e9-4baf-965e-dc529220bd30-config-volume\") pod \"collect-profiles-29410260-q2wwf\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.410755 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec6a2572-33e9-4baf-965e-dc529220bd30-secret-volume\") pod \"collect-profiles-29410260-q2wwf\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.426585 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdrnn\" (UniqueName: \"kubernetes.io/projected/ec6a2572-33e9-4baf-965e-dc529220bd30-kube-api-access-hdrnn\") pod \"collect-profiles-29410260-q2wwf\" (UID: 
\"ec6a2572-33e9-4baf-965e-dc529220bd30\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.479982 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.942767 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" event={"ID":"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096","Type":"ContainerStarted","Data":"fac517e2862c50f07191dd54a91658bae60ba9c76271f9893d7e814adf615698"} Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.945733 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-69cfbb4c64-24bqh" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.946317 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-69cfbb4c64-24bqh" event={"ID":"ce0e14b6-6e7f-467a-bad9-9479311d6c89","Type":"ContainerDied","Data":"aa2a321367438e8c7f189af66a93deda58f1f2a6897407e214c4225649e51244"} Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.946368 4935 scope.go:117] "RemoveContainer" containerID="a6b4888dce55132e1517ff9655388ed60e5f97f8a462f38516f31e333f2ad680" Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.984068 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf"] Dec 01 19:00:00 crc kubenswrapper[4935]: I1201 19:00:00.995335 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-69cfbb4c64-24bqh"] Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.005412 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-engine-69cfbb4c64-24bqh"] Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.256899 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-qm9b2" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.426078 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-combined-ca-bundle\") pod \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.426388 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-scripts\") pod \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.426427 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg4jr\" (UniqueName: \"kubernetes.io/projected/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-kube-api-access-pg4jr\") pod \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.426496 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-config-data\") pod \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\" (UID: \"a02039c1-bb22-4f93-99ff-e9d4bead1b9c\") " Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.436318 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-scripts" (OuterVolumeSpecName: "scripts") pod "a02039c1-bb22-4f93-99ff-e9d4bead1b9c" (UID: "a02039c1-bb22-4f93-99ff-e9d4bead1b9c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.444319 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-kube-api-access-pg4jr" (OuterVolumeSpecName: "kube-api-access-pg4jr") pod "a02039c1-bb22-4f93-99ff-e9d4bead1b9c" (UID: "a02039c1-bb22-4f93-99ff-e9d4bead1b9c"). InnerVolumeSpecName "kube-api-access-pg4jr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.466206 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a02039c1-bb22-4f93-99ff-e9d4bead1b9c" (UID: "a02039c1-bb22-4f93-99ff-e9d4bead1b9c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.477342 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-config-data" (OuterVolumeSpecName: "config-data") pod "a02039c1-bb22-4f93-99ff-e9d4bead1b9c" (UID: "a02039c1-bb22-4f93-99ff-e9d4bead1b9c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.529164 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.529192 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg4jr\" (UniqueName: \"kubernetes.io/projected/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-kube-api-access-pg4jr\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.529203 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.529213 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02039c1-bb22-4f93-99ff-e9d4bead1b9c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.987807 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-qm9b2" event={"ID":"a02039c1-bb22-4f93-99ff-e9d4bead1b9c","Type":"ContainerDied","Data":"30fea187069ae1985d47e80993b056be25a3d5531a344a2058ba2bb6ff51f420"} Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.987854 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30fea187069ae1985d47e80993b056be25a3d5531a344a2058ba2bb6ff51f420" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.987938 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-qm9b2" Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.994627 4935 generic.go:334] "Generic (PLEG): container finished" podID="ec6a2572-33e9-4baf-965e-dc529220bd30" containerID="2fe3214fbccb68b65de05277def3e067647ccb73c7d17f90dcbe3d2fb1aaff4b" exitCode=0 Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.994688 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" event={"ID":"ec6a2572-33e9-4baf-965e-dc529220bd30","Type":"ContainerDied","Data":"2fe3214fbccb68b65de05277def3e067647ccb73c7d17f90dcbe3d2fb1aaff4b"} Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.994716 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" event={"ID":"ec6a2572-33e9-4baf-965e-dc529220bd30","Type":"ContainerStarted","Data":"0fb0f6bb491cf00f0e1401575de74e7c37c2108aa82ad47d01d010ec7f98fcd4"} Dec 01 19:00:01 crc kubenswrapper[4935]: I1201 19:00:01.997841 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" event={"ID":"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096","Type":"ContainerStarted","Data":"49f35f61e7164ecda78791e3330f1bb36529a5b7a05fea44d814564f847eeca1"} Dec 01 19:00:02 crc kubenswrapper[4935]: I1201 19:00:02.062632 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" podStartSLOduration=2.314443246 podStartE2EDuration="3.062614994s" podCreationTimestamp="2025-12-01 18:59:59 +0000 UTC" firstStartedPulling="2025-12-01 19:00:00.063701991 +0000 UTC m=+1814.085331250" lastFinishedPulling="2025-12-01 19:00:00.811873739 +0000 
UTC m=+1814.833502998" observedRunningTime="2025-12-01 19:00:02.054660711 +0000 UTC m=+1816.076289970" watchObservedRunningTime="2025-12-01 19:00:02.062614994 +0000 UTC m=+1816.084244253" Dec 01 19:00:02 crc kubenswrapper[4935]: I1201 19:00:02.530745 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce0e14b6-6e7f-467a-bad9-9479311d6c89" path="/var/lib/kubelet/pods/ce0e14b6-6e7f-467a-bad9-9479311d6c89/volumes" Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.497897 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.532350 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.666134 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec6a2572-33e9-4baf-965e-dc529220bd30-secret-volume\") pod \"ec6a2572-33e9-4baf-965e-dc529220bd30\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.666489 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdrnn\" (UniqueName: \"kubernetes.io/projected/ec6a2572-33e9-4baf-965e-dc529220bd30-kube-api-access-hdrnn\") pod \"ec6a2572-33e9-4baf-965e-dc529220bd30\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.666571 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec6a2572-33e9-4baf-965e-dc529220bd30-config-volume\") pod \"ec6a2572-33e9-4baf-965e-dc529220bd30\" (UID: \"ec6a2572-33e9-4baf-965e-dc529220bd30\") " Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.667307 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec6a2572-33e9-4baf-965e-dc529220bd30-config-volume" (OuterVolumeSpecName: "config-volume") pod "ec6a2572-33e9-4baf-965e-dc529220bd30" (UID: "ec6a2572-33e9-4baf-965e-dc529220bd30"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.675712 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec6a2572-33e9-4baf-965e-dc529220bd30-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ec6a2572-33e9-4baf-965e-dc529220bd30" (UID: "ec6a2572-33e9-4baf-965e-dc529220bd30"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.690903 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec6a2572-33e9-4baf-965e-dc529220bd30-kube-api-access-hdrnn" (OuterVolumeSpecName: "kube-api-access-hdrnn") pod "ec6a2572-33e9-4baf-965e-dc529220bd30" (UID: "ec6a2572-33e9-4baf-965e-dc529220bd30"). InnerVolumeSpecName "kube-api-access-hdrnn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.770785 4935 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec6a2572-33e9-4baf-965e-dc529220bd30-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.770830 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdrnn\" (UniqueName: \"kubernetes.io/projected/ec6a2572-33e9-4baf-965e-dc529220bd30-kube-api-access-hdrnn\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.770845 4935 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec6a2572-33e9-4baf-965e-dc529220bd30-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.809336 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.809600 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-api" containerID="cri-o://2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5" gracePeriod=30 Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.810016 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-evaluator" containerID="cri-o://931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f" gracePeriod=30 Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.810047 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-notifier" containerID="cri-o://7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed" gracePeriod=30 Dec 01 19:00:03 crc kubenswrapper[4935]: I1201 19:00:03.810204 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-listener" containerID="cri-o://4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b" gracePeriod=30 Dec 01 19:00:04 crc kubenswrapper[4935]: I1201 19:00:04.028918 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" Dec 01 19:00:04 crc kubenswrapper[4935]: I1201 19:00:04.029664 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf" event={"ID":"ec6a2572-33e9-4baf-965e-dc529220bd30","Type":"ContainerDied","Data":"0fb0f6bb491cf00f0e1401575de74e7c37c2108aa82ad47d01d010ec7f98fcd4"} Dec 01 19:00:04 crc kubenswrapper[4935]: I1201 19:00:04.029713 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fb0f6bb491cf00f0e1401575de74e7c37c2108aa82ad47d01d010ec7f98fcd4" Dec 01 19:00:04 crc kubenswrapper[4935]: I1201 19:00:04.035027 4935 generic.go:334] "Generic (PLEG): container finished" podID="a6b95e39-a3e1-4e74-9e30-6c29a6aa8096" containerID="49f35f61e7164ecda78791e3330f1bb36529a5b7a05fea44d814564f847eeca1" exitCode=0 Dec 01 19:00:04 crc kubenswrapper[4935]: I1201 19:00:04.035071 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" event={"ID":"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096","Type":"ContainerDied","Data":"49f35f61e7164ecda78791e3330f1bb36529a5b7a05fea44d814564f847eeca1"} Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.048709 4935 generic.go:334] "Generic (PLEG): container finished" podID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerID="4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b" exitCode=0 Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.049045 4935 generic.go:334] "Generic (PLEG): container finished" podID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerID="931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f" exitCode=0 Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.049054 4935 generic.go:334] "Generic (PLEG): container finished" podID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerID="2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5" exitCode=0 Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.048785 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerDied","Data":"4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b"} Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.049199 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerDied","Data":"931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f"} Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.049215 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerDied","Data":"2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5"} Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.569908 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.717644 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-ssh-key\") pod \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.717743 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-inventory\") pod \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.717795 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phr5w\" (UniqueName: \"kubernetes.io/projected/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-kube-api-access-phr5w\") pod \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\" (UID: \"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096\") " Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.752509 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-kube-api-access-phr5w" (OuterVolumeSpecName: "kube-api-access-phr5w") pod "a6b95e39-a3e1-4e74-9e30-6c29a6aa8096" (UID: "a6b95e39-a3e1-4e74-9e30-6c29a6aa8096"). InnerVolumeSpecName "kube-api-access-phr5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.755727 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a6b95e39-a3e1-4e74-9e30-6c29a6aa8096" (UID: "a6b95e39-a3e1-4e74-9e30-6c29a6aa8096"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.756196 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-inventory" (OuterVolumeSpecName: "inventory") pod "a6b95e39-a3e1-4e74-9e30-6c29a6aa8096" (UID: "a6b95e39-a3e1-4e74-9e30-6c29a6aa8096"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.820851 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.820886 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:05 crc kubenswrapper[4935]: I1201 19:00:05.820896 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phr5w\" (UniqueName: \"kubernetes.io/projected/a6b95e39-a3e1-4e74-9e30-6c29a6aa8096-kube-api-access-phr5w\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.067571 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" event={"ID":"a6b95e39-a3e1-4e74-9e30-6c29a6aa8096","Type":"ContainerDied","Data":"fac517e2862c50f07191dd54a91658bae60ba9c76271f9893d7e814adf615698"} Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.067924 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fac517e2862c50f07191dd54a91658bae60ba9c76271f9893d7e814adf615698" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.067990 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-n5wm2" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.479133 4935 scope.go:117] "RemoveContainer" containerID="1f6e0bda07250babfb8ad978d9a7ce82f21dedc1cee57203b4539898d7ccbb24" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.535413 4935 scope.go:117] "RemoveContainer" containerID="2ad41ceccfefd9b117755b9f76eb8900954ae87e9d3d2ef4ffdb6a4ec7e67b24" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.669608 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf"] Dec 01 19:00:06 crc kubenswrapper[4935]: E1201 19:00:06.670314 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a02039c1-bb22-4f93-99ff-e9d4bead1b9c" containerName="aodh-db-sync" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.670331 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a02039c1-bb22-4f93-99ff-e9d4bead1b9c" containerName="aodh-db-sync" Dec 01 19:00:06 crc kubenswrapper[4935]: E1201 19:00:06.670360 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce0e14b6-6e7f-467a-bad9-9479311d6c89" containerName="heat-engine" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.670369 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce0e14b6-6e7f-467a-bad9-9479311d6c89" containerName="heat-engine" Dec 01 19:00:06 crc kubenswrapper[4935]: E1201 19:00:06.670417 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6a2572-33e9-4baf-965e-dc529220bd30" containerName="collect-profiles" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.670425 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6a2572-33e9-4baf-965e-dc529220bd30" containerName="collect-profiles" Dec 01 19:00:06 crc kubenswrapper[4935]: E1201 19:00:06.670448 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6b95e39-a3e1-4e74-9e30-6c29a6aa8096" containerName="redhat-edpm-deployment-openstack-edpm-ipam" 
Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.670458 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6b95e39-a3e1-4e74-9e30-6c29a6aa8096" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.670775 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6a2572-33e9-4baf-965e-dc529220bd30" containerName="collect-profiles" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.670806 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6b95e39-a3e1-4e74-9e30-6c29a6aa8096" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.670836 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce0e14b6-6e7f-467a-bad9-9479311d6c89" containerName="heat-engine" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.670848 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="a02039c1-bb22-4f93-99ff-e9d4bead1b9c" containerName="aodh-db-sync" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.671827 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.678986 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.679186 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.679270 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.679449 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.720550 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf"] Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.846913 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.846975 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.847209 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc 
kubenswrapper[4935]: I1201 19:00:06.847276 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7j4f\" (UniqueName: \"kubernetes.io/projected/a552da3f-247b-4339-a48d-79a3c948af00-kube-api-access-c7j4f\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.949125 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.949525 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7j4f\" (UniqueName: \"kubernetes.io/projected/a552da3f-247b-4339-a48d-79a3c948af00-kube-api-access-c7j4f\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.949611 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.949650 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.953991 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.954539 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.957479 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:06 crc kubenswrapper[4935]: I1201 19:00:06.970876 4935 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7j4f\" (UniqueName: \"kubernetes.io/projected/a552da3f-247b-4339-a48d-79a3c948af00-kube-api-access-c7j4f\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:07 crc kubenswrapper[4935]: I1201 19:00:07.037821 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:00:07 crc kubenswrapper[4935]: I1201 19:00:07.509367 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 19:00:07 crc kubenswrapper[4935]: E1201 19:00:07.509704 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:00:07 crc kubenswrapper[4935]: I1201 19:00:07.665545 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf"] Dec 01 19:00:07 crc kubenswrapper[4935]: W1201 19:00:07.673781 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda552da3f_247b_4339_a48d_79a3c948af00.slice/crio-8ecd28f91f0f3a28be23e1b2785dc2edbc44abf48275cad0ec8c4dd2849183f5 WatchSource:0}: Error finding container 8ecd28f91f0f3a28be23e1b2785dc2edbc44abf48275cad0ec8c4dd2849183f5: Status 404 returned error can't find the container with id 8ecd28f91f0f3a28be23e1b2785dc2edbc44abf48275cad0ec8c4dd2849183f5 Dec 01 19:00:08 crc kubenswrapper[4935]: I1201 19:00:08.095395 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" event={"ID":"a552da3f-247b-4339-a48d-79a3c948af00","Type":"ContainerStarted","Data":"8ecd28f91f0f3a28be23e1b2785dc2edbc44abf48275cad0ec8c4dd2849183f5"} Dec 01 19:00:09 crc kubenswrapper[4935]: I1201 19:00:09.107255 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" event={"ID":"a552da3f-247b-4339-a48d-79a3c948af00","Type":"ContainerStarted","Data":"aa448f702281f5f7a56190b6a9683320ffb1f06d94bde4fb63875ab1eb82f242"} Dec 01 19:00:09 crc kubenswrapper[4935]: I1201 19:00:09.123022 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" podStartSLOduration=2.414047428 podStartE2EDuration="3.123004937s" podCreationTimestamp="2025-12-01 19:00:06 +0000 UTC" firstStartedPulling="2025-12-01 19:00:07.675784497 +0000 UTC m=+1821.697413756" lastFinishedPulling="2025-12-01 19:00:08.384741976 +0000 UTC m=+1822.406371265" observedRunningTime="2025-12-01 19:00:09.122522062 +0000 UTC m=+1823.144151321" watchObservedRunningTime="2025-12-01 19:00:09.123004937 +0000 UTC m=+1823.144634196" Dec 01 19:00:11 crc kubenswrapper[4935]: I1201 19:00:11.920888 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.006472 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-public-tls-certs\") pod \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.006658 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-internal-tls-certs\") pod \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.006953 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-scripts\") pod \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.006995 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8zkn\" (UniqueName: \"kubernetes.io/projected/32e6fc0e-c91a-4314-bc53-d5053316f8cd-kube-api-access-c8zkn\") pod \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.007069 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-combined-ca-bundle\") pod \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.007179 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-config-data\") pod \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\" (UID: \"32e6fc0e-c91a-4314-bc53-d5053316f8cd\") " Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.016480 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32e6fc0e-c91a-4314-bc53-d5053316f8cd-kube-api-access-c8zkn" (OuterVolumeSpecName: "kube-api-access-c8zkn") pod "32e6fc0e-c91a-4314-bc53-d5053316f8cd" (UID: "32e6fc0e-c91a-4314-bc53-d5053316f8cd"). InnerVolumeSpecName "kube-api-access-c8zkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.018752 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-scripts" (OuterVolumeSpecName: "scripts") pod "32e6fc0e-c91a-4314-bc53-d5053316f8cd" (UID: "32e6fc0e-c91a-4314-bc53-d5053316f8cd"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.110932 4935 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.110965 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8zkn\" (UniqueName: \"kubernetes.io/projected/32e6fc0e-c91a-4314-bc53-d5053316f8cd-kube-api-access-c8zkn\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.119439 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "32e6fc0e-c91a-4314-bc53-d5053316f8cd" (UID: "32e6fc0e-c91a-4314-bc53-d5053316f8cd"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.128415 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "32e6fc0e-c91a-4314-bc53-d5053316f8cd" (UID: "32e6fc0e-c91a-4314-bc53-d5053316f8cd"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.146677 4935 generic.go:334] "Generic (PLEG): container finished" podID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerID="7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed" exitCode=0 Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.146718 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerDied","Data":"7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed"} Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.146742 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"32e6fc0e-c91a-4314-bc53-d5053316f8cd","Type":"ContainerDied","Data":"6c1619208c604c0a35c8726749db796aba8dda87673f26257f17271748dba156"} Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.146756 4935 scope.go:117] "RemoveContainer" containerID="4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.146918 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.170304 4935 scope.go:117] "RemoveContainer" containerID="7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.186759 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32e6fc0e-c91a-4314-bc53-d5053316f8cd" (UID: "32e6fc0e-c91a-4314-bc53-d5053316f8cd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.191289 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-config-data" (OuterVolumeSpecName: "config-data") pod "32e6fc0e-c91a-4314-bc53-d5053316f8cd" (UID: "32e6fc0e-c91a-4314-bc53-d5053316f8cd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.198904 4935 scope.go:117] "RemoveContainer" containerID="931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.214748 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.214775 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.214784 4935 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.214794 4935 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32e6fc0e-c91a-4314-bc53-d5053316f8cd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.222848 4935 scope.go:117] "RemoveContainer" containerID="2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.256939 4935 scope.go:117] "RemoveContainer" containerID="4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b" Dec 01 19:00:12 crc kubenswrapper[4935]: E1201 19:00:12.257330 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b\": container with ID starting with 4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b not found: ID does not exist" containerID="4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.257396 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b"} err="failed to get container status \"4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b\": rpc error: code = NotFound desc = could not find container \"4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b\": container with ID starting with 4f5a8d434254783f0c5d57a8f4e5c27e4c80fef3e3555347043338d06124bb4b not found: ID does not exist" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.257422 4935 scope.go:117] "RemoveContainer" containerID="7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed" Dec 01 19:00:12 crc kubenswrapper[4935]: E1201 19:00:12.257656 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed\": container with ID starting with 7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed not found: ID does not exist" containerID="7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.257688 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed"} err="failed to get container status \"7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed\": rpc error: code = NotFound desc = could not find container \"7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed\": container with ID starting with 7f7b0e4c3ebd8f536ef143f9f194f51672d8cd6742642d58610cb60c8dee47ed not found: ID does not exist" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.257708 4935 scope.go:117] "RemoveContainer" containerID="931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f" Dec 01 19:00:12 crc kubenswrapper[4935]: E1201 19:00:12.257917 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f\": container with ID starting with 931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f not found: ID does not exist" containerID="931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.257944 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f"} err="failed to get container status \"931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f\": rpc error: code = NotFound desc = could not find container \"931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f\": container with ID starting with 931c8fdc79fdca359e9542bd15ce7813cd151dd0f71262c9ebe1b5b1e826b43f not found: ID does not exist" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.257960 4935 scope.go:117] "RemoveContainer" containerID="2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5" Dec 01 19:00:12 crc kubenswrapper[4935]: E1201 19:00:12.258228 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5\": container with ID starting with 2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5 not found: ID does not exist" containerID="2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.258255 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5"} err="failed to get container status \"2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5\": rpc error: code = NotFound desc = could not find container \"2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5\": container with ID starting with 2e1c86da2fc5992d0024d3840d88bc61ce244f4bfbf616bb34a7fb3010f0a6a5 not found: ID does not exist" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.543740 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.592464 4935 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.618827 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 01 19:00:12 crc kubenswrapper[4935]: E1201 19:00:12.619734 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-api" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.619768 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-api" Dec 01 19:00:12 crc kubenswrapper[4935]: E1201 19:00:12.619791 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-notifier" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.619805 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-notifier" Dec 01 19:00:12 crc kubenswrapper[4935]: E1201 19:00:12.619851 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-evaluator" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.619866 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-evaluator" Dec 01 19:00:12 crc kubenswrapper[4935]: E1201 19:00:12.619895 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-listener" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.619907 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-listener" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.620412 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-api" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.620460 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-evaluator" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.620481 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-notifier" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.620510 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" containerName="aodh-listener" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.623982 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.629567 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.629615 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-l8dhf" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.629791 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.629878 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.630032 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.635812 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.727647 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsw52\" (UniqueName: \"kubernetes.io/projected/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-kube-api-access-fsw52\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.727925 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-config-data\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.728296 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-internal-tls-certs\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.728434 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-scripts\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.728674 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-combined-ca-bundle\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.728738 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-public-tls-certs\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.830818 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-scripts\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc 
kubenswrapper[4935]: I1201 19:00:12.831163 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-combined-ca-bundle\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.831188 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-public-tls-certs\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.831228 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsw52\" (UniqueName: \"kubernetes.io/projected/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-kube-api-access-fsw52\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.831307 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-config-data\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.831396 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-internal-tls-certs\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.839820 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-scripts\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.840590 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-internal-tls-certs\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.843680 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-public-tls-certs\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.850455 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-config-data\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.850715 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-combined-ca-bundle\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.860818 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsw52\" 
(UniqueName: \"kubernetes.io/projected/33ca5248-0c8e-4b6d-81ad-15c4c328dbd8-kube-api-access-fsw52\") pod \"aodh-0\" (UID: \"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8\") " pod="openstack/aodh-0" Dec 01 19:00:12 crc kubenswrapper[4935]: I1201 19:00:12.939721 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 01 19:00:13 crc kubenswrapper[4935]: I1201 19:00:13.506130 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 01 19:00:13 crc kubenswrapper[4935]: W1201 19:00:13.517377 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33ca5248_0c8e_4b6d_81ad_15c4c328dbd8.slice/crio-57aa543c2bc5ab0fd2300291c0f222623e3da6deed8cdf7c69443fb6bb5baea7 WatchSource:0}: Error finding container 57aa543c2bc5ab0fd2300291c0f222623e3da6deed8cdf7c69443fb6bb5baea7: Status 404 returned error can't find the container with id 57aa543c2bc5ab0fd2300291c0f222623e3da6deed8cdf7c69443fb6bb5baea7 Dec 01 19:00:14 crc kubenswrapper[4935]: I1201 19:00:14.178257 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8","Type":"ContainerStarted","Data":"eae137b32a21103be97914ab9e5dc707f56842322c09d2654ffac85583152983"} Dec 01 19:00:14 crc kubenswrapper[4935]: I1201 19:00:14.178714 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8","Type":"ContainerStarted","Data":"57aa543c2bc5ab0fd2300291c0f222623e3da6deed8cdf7c69443fb6bb5baea7"} Dec 01 19:00:14 crc kubenswrapper[4935]: I1201 19:00:14.523965 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32e6fc0e-c91a-4314-bc53-d5053316f8cd" path="/var/lib/kubelet/pods/32e6fc0e-c91a-4314-bc53-d5053316f8cd/volumes" Dec 01 19:00:15 crc kubenswrapper[4935]: I1201 19:00:15.194315 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8","Type":"ContainerStarted","Data":"4ac3d5b72d2629088ae98754763b4abe73558f8b7c17f37f2963165eff9c2a52"} Dec 01 19:00:17 crc kubenswrapper[4935]: I1201 19:00:17.219380 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8","Type":"ContainerStarted","Data":"ac7f73c83206c0d816b2b7d17b6aa0aa861ed97e0159a32736af5f1bbb190869"} Dec 01 19:00:18 crc kubenswrapper[4935]: I1201 19:00:18.241344 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"33ca5248-0c8e-4b6d-81ad-15c4c328dbd8","Type":"ContainerStarted","Data":"a2a3c53f22ad6becabcab58322dc6016a4230cd0f7400a75727762087456c73e"} Dec 01 19:00:18 crc kubenswrapper[4935]: I1201 19:00:18.316668 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.147323662 podStartE2EDuration="6.316646651s" podCreationTimestamp="2025-12-01 19:00:12 +0000 UTC" firstStartedPulling="2025-12-01 19:00:13.519390469 +0000 UTC m=+1827.541019728" lastFinishedPulling="2025-12-01 19:00:17.688713438 +0000 UTC m=+1831.710342717" observedRunningTime="2025-12-01 19:00:18.26035974 +0000 UTC m=+1832.281989009" watchObservedRunningTime="2025-12-01 19:00:18.316646651 +0000 UTC m=+1832.338275910" Dec 01 19:00:18 crc kubenswrapper[4935]: I1201 19:00:18.507934 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 19:00:18 crc 
kubenswrapper[4935]: E1201 19:00:18.508437 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:00:29 crc kubenswrapper[4935]: I1201 19:00:29.509940 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 19:00:29 crc kubenswrapper[4935]: E1201 19:00:29.511443 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:00:41 crc kubenswrapper[4935]: I1201 19:00:41.510428 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 19:00:41 crc kubenswrapper[4935]: E1201 19:00:41.511927 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:00:50 crc kubenswrapper[4935]: I1201 19:00:50.596318 4935 patch_prober.go:28] interesting pod/metrics-server-67dccdf457-pfhr5 container/metrics-server namespace/openshift-monitoring: Liveness probe status=failure output="Get \"https://10.217.0.76:10250/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 01 19:00:50 crc kubenswrapper[4935]: I1201 19:00:50.596862 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" podUID="d552028c-c467-4640-a164-66283cc6ba3b" containerName="metrics-server" probeResult="failure" output="Get \"https://10.217.0.76:10250/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 01 19:00:50 crc kubenswrapper[4935]: I1201 19:00:50.605965 4935 patch_prober.go:28] interesting pod/metrics-server-67dccdf457-pfhr5 container/metrics-server namespace/openshift-monitoring: Readiness probe status=failure output="Get \"https://10.217.0.76:10250/livez\": context deadline exceeded" start-of-body= Dec 01 19:00:50 crc kubenswrapper[4935]: I1201 19:00:50.606056 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-monitoring/metrics-server-67dccdf457-pfhr5" podUID="d552028c-c467-4640-a164-66283cc6ba3b" containerName="metrics-server" probeResult="failure" output="Get \"https://10.217.0.76:10250/livez\": context deadline exceeded" Dec 01 19:00:54 crc kubenswrapper[4935]: I1201 19:00:54.508663 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 19:00:54 crc kubenswrapper[4935]: E1201 19:00:54.509396 
4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.185225 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29410261-xl5dh"] Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.189733 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.208655 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29410261-xl5dh"] Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.295658 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-config-data\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.295833 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-fernet-keys\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.295883 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rcps\" (UniqueName: \"kubernetes.io/projected/1c5079eb-78df-422b-85e6-0f0cfed5f451-kube-api-access-8rcps\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.295912 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-combined-ca-bundle\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.399410 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-fernet-keys\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.399516 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rcps\" (UniqueName: \"kubernetes.io/projected/1c5079eb-78df-422b-85e6-0f0cfed5f451-kube-api-access-8rcps\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.399551 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-combined-ca-bundle\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.399796 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-config-data\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.405761 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-fernet-keys\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.407599 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-combined-ca-bundle\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.414834 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-config-data\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.419906 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rcps\" (UniqueName: \"kubernetes.io/projected/1c5079eb-78df-422b-85e6-0f0cfed5f451-kube-api-access-8rcps\") pod \"keystone-cron-29410261-xl5dh\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:00 crc kubenswrapper[4935]: I1201 19:01:00.540227 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:01 crc kubenswrapper[4935]: I1201 19:01:01.087546 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29410261-xl5dh"] Dec 01 19:01:01 crc kubenswrapper[4935]: I1201 19:01:01.809200 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410261-xl5dh" event={"ID":"1c5079eb-78df-422b-85e6-0f0cfed5f451","Type":"ContainerStarted","Data":"8dbb90a7e1d24345de5dd4ab8cef2e9863ce2b1c0682dfe4bf7a69d3bd1af126"} Dec 01 19:01:01 crc kubenswrapper[4935]: I1201 19:01:01.809632 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410261-xl5dh" event={"ID":"1c5079eb-78df-422b-85e6-0f0cfed5f451","Type":"ContainerStarted","Data":"2a8fe20e3163d66b1470bc82418435c1d0b7ed75f7bbbf67001e074dd5206d36"} Dec 01 19:01:01 crc kubenswrapper[4935]: I1201 19:01:01.834859 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29410261-xl5dh" podStartSLOduration=1.834844715 podStartE2EDuration="1.834844715s" podCreationTimestamp="2025-12-01 19:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:01:01.826632714 +0000 UTC m=+1875.848261973" watchObservedRunningTime="2025-12-01 19:01:01.834844715 +0000 UTC m=+1875.856473974" Dec 01 19:01:04 crc kubenswrapper[4935]: I1201 19:01:04.852012 4935 generic.go:334] "Generic (PLEG): container finished" podID="1c5079eb-78df-422b-85e6-0f0cfed5f451" containerID="8dbb90a7e1d24345de5dd4ab8cef2e9863ce2b1c0682dfe4bf7a69d3bd1af126" exitCode=0 Dec 01 19:01:04 crc kubenswrapper[4935]: I1201 19:01:04.852188 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410261-xl5dh" event={"ID":"1c5079eb-78df-422b-85e6-0f0cfed5f451","Type":"ContainerDied","Data":"8dbb90a7e1d24345de5dd4ab8cef2e9863ce2b1c0682dfe4bf7a69d3bd1af126"} Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.312101 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.487821 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-combined-ca-bundle\") pod \"1c5079eb-78df-422b-85e6-0f0cfed5f451\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.487957 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-config-data\") pod \"1c5079eb-78df-422b-85e6-0f0cfed5f451\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.488127 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-fernet-keys\") pod \"1c5079eb-78df-422b-85e6-0f0cfed5f451\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.488236 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rcps\" (UniqueName: \"kubernetes.io/projected/1c5079eb-78df-422b-85e6-0f0cfed5f451-kube-api-access-8rcps\") pod \"1c5079eb-78df-422b-85e6-0f0cfed5f451\" (UID: \"1c5079eb-78df-422b-85e6-0f0cfed5f451\") " Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.495645 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1c5079eb-78df-422b-85e6-0f0cfed5f451" (UID: "1c5079eb-78df-422b-85e6-0f0cfed5f451"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.495844 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c5079eb-78df-422b-85e6-0f0cfed5f451-kube-api-access-8rcps" (OuterVolumeSpecName: "kube-api-access-8rcps") pod "1c5079eb-78df-422b-85e6-0f0cfed5f451" (UID: "1c5079eb-78df-422b-85e6-0f0cfed5f451"). InnerVolumeSpecName "kube-api-access-8rcps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.523692 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.525956 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c5079eb-78df-422b-85e6-0f0cfed5f451" (UID: "1c5079eb-78df-422b-85e6-0f0cfed5f451"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.579862 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-config-data" (OuterVolumeSpecName: "config-data") pod "1c5079eb-78df-422b-85e6-0f0cfed5f451" (UID: "1c5079eb-78df-422b-85e6-0f0cfed5f451"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.591443 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rcps\" (UniqueName: \"kubernetes.io/projected/1c5079eb-78df-422b-85e6-0f0cfed5f451-kube-api-access-8rcps\") on node \"crc\" DevicePath \"\"" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.591475 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.591486 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.591495 4935 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c5079eb-78df-422b-85e6-0f0cfed5f451-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.822652 4935 scope.go:117] "RemoveContainer" containerID="1f7789a0143a24acdaeef7679e202c29ad04517bbff9053eea70cf4605eb84ad" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.844834 4935 scope.go:117] "RemoveContainer" containerID="9bd655859982a469f3fa7f88256dd912f7215da1223cbaa4a0d90e6354469a70" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.886042 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410261-xl5dh" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.886664 4935 scope.go:117] "RemoveContainer" containerID="32dfb63503d99736fd10ac1fcfe6803feeae2661f5b6cb558876d87015ad4dd0" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.886039 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410261-xl5dh" event={"ID":"1c5079eb-78df-422b-85e6-0f0cfed5f451","Type":"ContainerDied","Data":"2a8fe20e3163d66b1470bc82418435c1d0b7ed75f7bbbf67001e074dd5206d36"} Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.888012 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a8fe20e3163d66b1470bc82418435c1d0b7ed75f7bbbf67001e074dd5206d36" Dec 01 19:01:06 crc kubenswrapper[4935]: I1201 19:01:06.890124 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"65f55d9794ae9f16c4bcf6f9b1370c7d8f50ea2498f356417eb8e5a61df9d9bb"} Dec 01 19:01:11 crc kubenswrapper[4935]: I1201 19:01:11.287952 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" podUID="d97b7792-f596-4358-8b02-1ae1368ac68d" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.775509 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-b5fvn"] Dec 01 19:01:20 crc kubenswrapper[4935]: E1201 19:01:20.776694 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5079eb-78df-422b-85e6-0f0cfed5f451" containerName="keystone-cron" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.776712 4935 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1c5079eb-78df-422b-85e6-0f0cfed5f451" containerName="keystone-cron" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.777076 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5079eb-78df-422b-85e6-0f0cfed5f451" containerName="keystone-cron" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.779873 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.801910 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b5fvn"] Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.832786 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ae1ef2-88df-40ec-bc28-1f7b17d03cad-catalog-content\") pod \"certified-operators-b5fvn\" (UID: \"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad\") " pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.832859 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ae1ef2-88df-40ec-bc28-1f7b17d03cad-utilities\") pod \"certified-operators-b5fvn\" (UID: \"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad\") " pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.833105 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrnfc\" (UniqueName: \"kubernetes.io/projected/e5ae1ef2-88df-40ec-bc28-1f7b17d03cad-kube-api-access-wrnfc\") pod \"certified-operators-b5fvn\" (UID: \"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad\") " pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.935133 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrnfc\" (UniqueName: \"kubernetes.io/projected/e5ae1ef2-88df-40ec-bc28-1f7b17d03cad-kube-api-access-wrnfc\") pod \"certified-operators-b5fvn\" (UID: \"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad\") " pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.935256 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ae1ef2-88df-40ec-bc28-1f7b17d03cad-catalog-content\") pod \"certified-operators-b5fvn\" (UID: \"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad\") " pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.935283 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ae1ef2-88df-40ec-bc28-1f7b17d03cad-utilities\") pod \"certified-operators-b5fvn\" (UID: \"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad\") " pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.936316 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5ae1ef2-88df-40ec-bc28-1f7b17d03cad-utilities\") pod \"certified-operators-b5fvn\" (UID: \"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad\") " pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.936390 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5ae1ef2-88df-40ec-bc28-1f7b17d03cad-catalog-content\") pod \"certified-operators-b5fvn\" (UID: \"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad\") " pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:20 crc kubenswrapper[4935]: I1201 19:01:20.959702 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrnfc\" (UniqueName: \"kubernetes.io/projected/e5ae1ef2-88df-40ec-bc28-1f7b17d03cad-kube-api-access-wrnfc\") pod \"certified-operators-b5fvn\" (UID: \"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad\") " pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:21 crc kubenswrapper[4935]: I1201 19:01:21.115912 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:21 crc kubenswrapper[4935]: I1201 19:01:21.688129 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b5fvn"] Dec 01 19:01:21 crc kubenswrapper[4935]: W1201 19:01:21.695024 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5ae1ef2_88df_40ec_bc28_1f7b17d03cad.slice/crio-8006baff05fd281c07300487fb16e73013477ea6ef577395d19837dc21993b0c WatchSource:0}: Error finding container 8006baff05fd281c07300487fb16e73013477ea6ef577395d19837dc21993b0c: Status 404 returned error can't find the container with id 8006baff05fd281c07300487fb16e73013477ea6ef577395d19837dc21993b0c Dec 01 19:01:22 crc kubenswrapper[4935]: I1201 19:01:22.131190 4935 generic.go:334] "Generic (PLEG): container finished" podID="e5ae1ef2-88df-40ec-bc28-1f7b17d03cad" containerID="e4c660726417233f3a5d53cdf541281de5b20d5775b2a2d382b7a3f7c50213c8" exitCode=0 Dec 01 19:01:22 crc kubenswrapper[4935]: I1201 19:01:22.131266 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5fvn" event={"ID":"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad","Type":"ContainerDied","Data":"e4c660726417233f3a5d53cdf541281de5b20d5775b2a2d382b7a3f7c50213c8"} Dec 01 19:01:22 crc kubenswrapper[4935]: I1201 19:01:22.131662 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5fvn" event={"ID":"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad","Type":"ContainerStarted","Data":"8006baff05fd281c07300487fb16e73013477ea6ef577395d19837dc21993b0c"} Dec 01 19:01:28 crc kubenswrapper[4935]: I1201 19:01:28.210962 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5fvn" event={"ID":"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad","Type":"ContainerStarted","Data":"f35089bbc5b1c06655a8fd6eb7353bd30b6a22f5d9b58b3b58aef03e13e25bb9"} Dec 01 19:01:29 crc kubenswrapper[4935]: I1201 19:01:29.231050 4935 generic.go:334] "Generic (PLEG): container finished" podID="e5ae1ef2-88df-40ec-bc28-1f7b17d03cad" containerID="f35089bbc5b1c06655a8fd6eb7353bd30b6a22f5d9b58b3b58aef03e13e25bb9" exitCode=0 Dec 01 19:01:29 crc kubenswrapper[4935]: I1201 19:01:29.231214 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5fvn" event={"ID":"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad","Type":"ContainerDied","Data":"f35089bbc5b1c06655a8fd6eb7353bd30b6a22f5d9b58b3b58aef03e13e25bb9"} Dec 01 19:01:30 crc kubenswrapper[4935]: I1201 19:01:30.248965 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-b5fvn" event={"ID":"e5ae1ef2-88df-40ec-bc28-1f7b17d03cad","Type":"ContainerStarted","Data":"b66b0664037cbeea4f73a9fc7e493ad9f803a39a4c8720d1d10a70602a75ed22"} Dec 01 19:01:30 crc kubenswrapper[4935]: I1201 19:01:30.302241 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-b5fvn" podStartSLOduration=2.616881909 podStartE2EDuration="10.302212839s" podCreationTimestamp="2025-12-01 19:01:20 +0000 UTC" firstStartedPulling="2025-12-01 19:01:22.133914128 +0000 UTC m=+1896.155543387" lastFinishedPulling="2025-12-01 19:01:29.819245018 +0000 UTC m=+1903.840874317" observedRunningTime="2025-12-01 19:01:30.279673492 +0000 UTC m=+1904.301302761" watchObservedRunningTime="2025-12-01 19:01:30.302212839 +0000 UTC m=+1904.323842108" Dec 01 19:01:31 crc kubenswrapper[4935]: I1201 19:01:31.116516 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:31 crc kubenswrapper[4935]: I1201 19:01:31.117063 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:32 crc kubenswrapper[4935]: I1201 19:01:32.196539 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-b5fvn" podUID="e5ae1ef2-88df-40ec-bc28-1f7b17d03cad" containerName="registry-server" probeResult="failure" output=< Dec 01 19:01:32 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 19:01:32 crc kubenswrapper[4935]: > Dec 01 19:01:41 crc kubenswrapper[4935]: I1201 19:01:41.186950 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:41 crc kubenswrapper[4935]: I1201 19:01:41.286975 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-b5fvn" Dec 01 19:01:41 crc kubenswrapper[4935]: I1201 19:01:41.462307 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b5fvn"] Dec 01 19:01:41 crc kubenswrapper[4935]: I1201 19:01:41.487716 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jd8nr"] Dec 01 19:01:41 crc kubenswrapper[4935]: I1201 19:01:41.487946 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jd8nr" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" containerName="registry-server" containerID="cri-o://3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080" gracePeriod=2 Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.025031 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.073687 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-utilities\") pod \"efec9635-2457-41e3-8477-b6f6081dc30f\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.073839 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lqlv\" (UniqueName: \"kubernetes.io/projected/efec9635-2457-41e3-8477-b6f6081dc30f-kube-api-access-8lqlv\") pod \"efec9635-2457-41e3-8477-b6f6081dc30f\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.074045 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-catalog-content\") pod \"efec9635-2457-41e3-8477-b6f6081dc30f\" (UID: \"efec9635-2457-41e3-8477-b6f6081dc30f\") " Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.076041 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-utilities" (OuterVolumeSpecName: "utilities") pod "efec9635-2457-41e3-8477-b6f6081dc30f" (UID: "efec9635-2457-41e3-8477-b6f6081dc30f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.085447 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efec9635-2457-41e3-8477-b6f6081dc30f-kube-api-access-8lqlv" (OuterVolumeSpecName: "kube-api-access-8lqlv") pod "efec9635-2457-41e3-8477-b6f6081dc30f" (UID: "efec9635-2457-41e3-8477-b6f6081dc30f"). InnerVolumeSpecName "kube-api-access-8lqlv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.141760 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "efec9635-2457-41e3-8477-b6f6081dc30f" (UID: "efec9635-2457-41e3-8477-b6f6081dc30f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.177217 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.177253 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efec9635-2457-41e3-8477-b6f6081dc30f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.177262 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lqlv\" (UniqueName: \"kubernetes.io/projected/efec9635-2457-41e3-8477-b6f6081dc30f-kube-api-access-8lqlv\") on node \"crc\" DevicePath \"\"" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.435343 4935 generic.go:334] "Generic (PLEG): container finished" podID="efec9635-2457-41e3-8477-b6f6081dc30f" containerID="3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080" exitCode=0 Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.435404 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jd8nr" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.435428 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd8nr" event={"ID":"efec9635-2457-41e3-8477-b6f6081dc30f","Type":"ContainerDied","Data":"3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080"} Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.435710 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd8nr" event={"ID":"efec9635-2457-41e3-8477-b6f6081dc30f","Type":"ContainerDied","Data":"43bed21d7055abce0a981bcbfcd26057454a8cfe8445460ed685c8b74d0b7a85"} Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.435754 4935 scope.go:117] "RemoveContainer" containerID="3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.468522 4935 scope.go:117] "RemoveContainer" containerID="365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.501566 4935 scope.go:117] "RemoveContainer" containerID="54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.521143 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jd8nr"] Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.523201 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jd8nr"] Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.581831 4935 scope.go:117] "RemoveContainer" containerID="3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080" Dec 01 19:01:42 crc kubenswrapper[4935]: E1201 19:01:42.582643 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080\": container with ID starting with 3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080 not found: ID does not exist" containerID="3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.582713 
4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080"} err="failed to get container status \"3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080\": rpc error: code = NotFound desc = could not find container \"3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080\": container with ID starting with 3bbe2e7b1a096cdd92cf9d26d72edece07a336371e2f68cb25d7cbe25b65c080 not found: ID does not exist" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.582756 4935 scope.go:117] "RemoveContainer" containerID="365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b" Dec 01 19:01:42 crc kubenswrapper[4935]: E1201 19:01:42.583302 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b\": container with ID starting with 365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b not found: ID does not exist" containerID="365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.583347 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b"} err="failed to get container status \"365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b\": rpc error: code = NotFound desc = could not find container \"365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b\": container with ID starting with 365ed6df528c4a7da2e4f4169085e76f9ae82f1e244d521ff4270381a8eeca1b not found: ID does not exist" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.583373 4935 scope.go:117] "RemoveContainer" containerID="54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4" Dec 01 19:01:42 crc kubenswrapper[4935]: E1201 19:01:42.584062 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4\": container with ID starting with 54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4 not found: ID does not exist" containerID="54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4" Dec 01 19:01:42 crc kubenswrapper[4935]: I1201 19:01:42.584103 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4"} err="failed to get container status \"54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4\": rpc error: code = NotFound desc = could not find container \"54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4\": container with ID starting with 54a09b2370c20ebe31fb5695e3df987b020de8e85acdf0027eb94a35f8ef3cb4 not found: ID does not exist" Dec 01 19:01:44 crc kubenswrapper[4935]: I1201 19:01:44.535606 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" path="/var/lib/kubelet/pods/efec9635-2457-41e3-8477-b6f6081dc30f/volumes" Dec 01 19:03:07 crc kubenswrapper[4935]: I1201 19:03:07.094667 4935 scope.go:117] "RemoveContainer" containerID="38634acee11c071d79353a22f3314571d7e6b67e03cc63065a8a043ef3014fab" Dec 01 19:03:09 crc kubenswrapper[4935]: I1201 19:03:09.060514 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/placement-2e51-account-create-update-hwrgm"] Dec 01 19:03:09 crc kubenswrapper[4935]: I1201 19:03:09.085890 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-2e51-account-create-update-hwrgm"] Dec 01 19:03:09 crc kubenswrapper[4935]: I1201 19:03:09.100893 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-605e-account-create-update-bfsft"] Dec 01 19:03:09 crc kubenswrapper[4935]: I1201 19:03:09.122809 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-605e-account-create-update-bfsft"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.042115 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-openstack-db-create-8pnk2"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.055987 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-wph7x"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.068968 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-openstack-db-create-8pnk2"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.079327 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-c9ba-account-create-update-jm7xw"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.089059 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-pq64s"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.099716 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-c9ba-account-create-update-jm7xw"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.109430 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-sgqs7"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.118497 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-68a7-account-create-update-dpgxj"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.127611 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-wph7x"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.137188 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-pq64s"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.146829 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-sgqs7"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.156860 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-68a7-account-create-update-dpgxj"] Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.524813 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00b6178d-eeba-450b-b8e6-289bba7db372" path="/var/lib/kubelet/pods/00b6178d-eeba-450b-b8e6-289bba7db372/volumes" Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.525997 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="086eb35c-13fa-4941-8812-712ca0a53fdc" path="/var/lib/kubelet/pods/086eb35c-13fa-4941-8812-712ca0a53fdc/volumes" Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.527944 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="475f0637-3250-46ef-bafa-c3a57c5780a2" path="/var/lib/kubelet/pods/475f0637-3250-46ef-bafa-c3a57c5780a2/volumes" Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.530039 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d1fbcb0-5eaa-4512-bae5-0759240427f7" 
path="/var/lib/kubelet/pods/7d1fbcb0-5eaa-4512-bae5-0759240427f7/volumes" Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.531993 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82ea2a63-302e-4238-b445-98df3d0bac7d" path="/var/lib/kubelet/pods/82ea2a63-302e-4238-b445-98df3d0bac7d/volumes" Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.533411 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ba93a6b-9a94-4dc1-8bb2-392863cb64ba" path="/var/lib/kubelet/pods/8ba93a6b-9a94-4dc1-8bb2-392863cb64ba/volumes" Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.535219 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2df55b0-7b7c-4330-8c63-c1fdc708b950" path="/var/lib/kubelet/pods/c2df55b0-7b7c-4330-8c63-c1fdc708b950/volumes" Dec 01 19:03:10 crc kubenswrapper[4935]: I1201 19:03:10.537013 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffd39947-9d32-4aca-ac65-83d13c6fc3d9" path="/var/lib/kubelet/pods/ffd39947-9d32-4aca-ac65-83d13c6fc3d9/volumes" Dec 01 19:03:17 crc kubenswrapper[4935]: I1201 19:03:17.050385 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-6297-account-create-update-xm8f8"] Dec 01 19:03:17 crc kubenswrapper[4935]: I1201 19:03:17.069301 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-6297-account-create-update-xm8f8"] Dec 01 19:03:17 crc kubenswrapper[4935]: I1201 19:03:17.088701 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g"] Dec 01 19:03:17 crc kubenswrapper[4935]: I1201 19:03:17.099594 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mysqld-exporter-openstack-cell1-db-create-hnl8g"] Dec 01 19:03:18 crc kubenswrapper[4935]: I1201 19:03:18.530716 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db74a42b-e575-4904-8c66-e51d66b66278" path="/var/lib/kubelet/pods/db74a42b-e575-4904-8c66-e51d66b66278/volumes" Dec 01 19:03:18 crc kubenswrapper[4935]: I1201 19:03:18.531858 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e720e2eb-6a00-48ff-aac7-5f6cf40dfb70" path="/var/lib/kubelet/pods/e720e2eb-6a00-48ff-aac7-5f6cf40dfb70/volumes" Dec 01 19:03:24 crc kubenswrapper[4935]: I1201 19:03:24.346460 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:03:24 crc kubenswrapper[4935]: I1201 19:03:24.347109 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:03:41 crc kubenswrapper[4935]: I1201 19:03:41.068759 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-bwt89"] Dec 01 19:03:41 crc kubenswrapper[4935]: I1201 19:03:41.088032 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-bwt89"] Dec 01 19:03:41 crc kubenswrapper[4935]: I1201 19:03:41.230728 4935 generic.go:334] "Generic (PLEG): container finished" podID="a552da3f-247b-4339-a48d-79a3c948af00" 
containerID="aa448f702281f5f7a56190b6a9683320ffb1f06d94bde4fb63875ab1eb82f242" exitCode=0 Dec 01 19:03:41 crc kubenswrapper[4935]: I1201 19:03:41.231190 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" event={"ID":"a552da3f-247b-4339-a48d-79a3c948af00","Type":"ContainerDied","Data":"aa448f702281f5f7a56190b6a9683320ffb1f06d94bde4fb63875ab1eb82f242"} Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.528494 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d4835b9-5a99-43d2-90ef-4beafe03afa7" path="/var/lib/kubelet/pods/1d4835b9-5a99-43d2-90ef-4beafe03afa7/volumes" Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.758360 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.846505 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-inventory\") pod \"a552da3f-247b-4339-a48d-79a3c948af00\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.846804 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-ssh-key\") pod \"a552da3f-247b-4339-a48d-79a3c948af00\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.846900 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7j4f\" (UniqueName: \"kubernetes.io/projected/a552da3f-247b-4339-a48d-79a3c948af00-kube-api-access-c7j4f\") pod \"a552da3f-247b-4339-a48d-79a3c948af00\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.847042 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-bootstrap-combined-ca-bundle\") pod \"a552da3f-247b-4339-a48d-79a3c948af00\" (UID: \"a552da3f-247b-4339-a48d-79a3c948af00\") " Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.859376 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a552da3f-247b-4339-a48d-79a3c948af00-kube-api-access-c7j4f" (OuterVolumeSpecName: "kube-api-access-c7j4f") pod "a552da3f-247b-4339-a48d-79a3c948af00" (UID: "a552da3f-247b-4339-a48d-79a3c948af00"). InnerVolumeSpecName "kube-api-access-c7j4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.859453 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "a552da3f-247b-4339-a48d-79a3c948af00" (UID: "a552da3f-247b-4339-a48d-79a3c948af00"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.890012 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a552da3f-247b-4339-a48d-79a3c948af00" (UID: "a552da3f-247b-4339-a48d-79a3c948af00"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.942708 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-inventory" (OuterVolumeSpecName: "inventory") pod "a552da3f-247b-4339-a48d-79a3c948af00" (UID: "a552da3f-247b-4339-a48d-79a3c948af00"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.950787 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.950825 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.950838 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7j4f\" (UniqueName: \"kubernetes.io/projected/a552da3f-247b-4339-a48d-79a3c948af00-kube-api-access-c7j4f\") on node \"crc\" DevicePath \"\"" Dec 01 19:03:42 crc kubenswrapper[4935]: I1201 19:03:42.950855 4935 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a552da3f-247b-4339-a48d-79a3c948af00-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.259254 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" event={"ID":"a552da3f-247b-4339-a48d-79a3c948af00","Type":"ContainerDied","Data":"8ecd28f91f0f3a28be23e1b2785dc2edbc44abf48275cad0ec8c4dd2849183f5"} Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.259635 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ecd28f91f0f3a28be23e1b2785dc2edbc44abf48275cad0ec8c4dd2849183f5" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.259343 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.396107 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb"] Dec 01 19:03:43 crc kubenswrapper[4935]: E1201 19:03:43.396716 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a552da3f-247b-4339-a48d-79a3c948af00" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.396914 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a552da3f-247b-4339-a48d-79a3c948af00" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 19:03:43 crc kubenswrapper[4935]: E1201 19:03:43.396945 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" containerName="extract-utilities" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.396955 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" containerName="extract-utilities" Dec 01 19:03:43 crc kubenswrapper[4935]: E1201 19:03:43.396976 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" containerName="registry-server" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.396983 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" containerName="registry-server" Dec 01 19:03:43 crc kubenswrapper[4935]: E1201 19:03:43.396998 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" containerName="extract-content" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.397005 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" containerName="extract-content" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.397441 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="efec9635-2457-41e3-8477-b6f6081dc30f" containerName="registry-server" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.397475 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="a552da3f-247b-4339-a48d-79a3c948af00" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.398517 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.400658 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.401748 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.402129 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.402257 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.419992 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb"] Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.462705 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2v4dg\" (UniqueName: \"kubernetes.io/projected/763abe02-1bdf-4403-a139-a15aba539519-kube-api-access-2v4dg\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.462795 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.463212 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.565755 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.566008 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2v4dg\" (UniqueName: \"kubernetes.io/projected/763abe02-1bdf-4403-a139-a15aba539519-kube-api-access-2v4dg\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.566101 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-inventory\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.572785 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.573848 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.594611 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2v4dg\" (UniqueName: \"kubernetes.io/projected/763abe02-1bdf-4403-a139-a15aba539519-kube-api-access-2v4dg\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.743649 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.952703 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6bwbs"] Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.956673 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:43 crc kubenswrapper[4935]: I1201 19:03:43.967958 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6bwbs"] Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.079971 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blnsl\" (UniqueName: \"kubernetes.io/projected/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-kube-api-access-blnsl\") pod \"redhat-marketplace-6bwbs\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.080061 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-utilities\") pod \"redhat-marketplace-6bwbs\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.080115 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-catalog-content\") pod \"redhat-marketplace-6bwbs\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.182667 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blnsl\" (UniqueName: \"kubernetes.io/projected/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-kube-api-access-blnsl\") pod \"redhat-marketplace-6bwbs\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.182756 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-utilities\") pod \"redhat-marketplace-6bwbs\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.182810 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-catalog-content\") pod \"redhat-marketplace-6bwbs\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.183350 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-utilities\") pod \"redhat-marketplace-6bwbs\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.183445 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-catalog-content\") pod \"redhat-marketplace-6bwbs\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.199103 4935 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-blnsl\" (UniqueName: \"kubernetes.io/projected/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-kube-api-access-blnsl\") pod \"redhat-marketplace-6bwbs\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.293389 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.460741 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb"] Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.469975 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:03:44 crc kubenswrapper[4935]: I1201 19:03:44.826622 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6bwbs"] Dec 01 19:03:44 crc kubenswrapper[4935]: W1201 19:03:44.830399 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-db769bc775ad1c14bff9168a8f4f514b1a00f02c8e6414829b16b43f09e442e3 WatchSource:0}: Error finding container db769bc775ad1c14bff9168a8f4f514b1a00f02c8e6414829b16b43f09e442e3: Status 404 returned error can't find the container with id db769bc775ad1c14bff9168a8f4f514b1a00f02c8e6414829b16b43f09e442e3 Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.051845 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-d7l9s"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.068349 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-f9lxk"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.079543 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-20d7-account-create-update-bgg6d"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.094121 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-d7l9s"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.106220 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-ec83-account-create-update-n6hqd"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.116323 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-f9lxk"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.129690 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-20d7-account-create-update-bgg6d"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.141884 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-c7da-account-create-update-hprpg"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.169398 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-ec83-account-create-update-n6hqd"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.190820 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-c7da-account-create-update-hprpg"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.201690 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-dedb-account-create-update-dhdlw"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.211524 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-db-create-74w75"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.220657 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-74w75"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.229781 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-dedb-account-create-update-dhdlw"] Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.282649 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" event={"ID":"763abe02-1bdf-4403-a139-a15aba539519","Type":"ContainerStarted","Data":"728d5994bebe0b9c59ca41fd282d563af12ffbb07f72d9da4b258fb732595356"} Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.284656 4935 generic.go:334] "Generic (PLEG): container finished" podID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerID="289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb" exitCode=0 Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.284684 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6bwbs" event={"ID":"2f57e49a-0622-49c4-9c2c-1b744f01aa0a","Type":"ContainerDied","Data":"289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb"} Dec 01 19:03:45 crc kubenswrapper[4935]: I1201 19:03:45.284701 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6bwbs" event={"ID":"2f57e49a-0622-49c4-9c2c-1b744f01aa0a","Type":"ContainerStarted","Data":"db769bc775ad1c14bff9168a8f4f514b1a00f02c8e6414829b16b43f09e442e3"} Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.302214 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" event={"ID":"763abe02-1bdf-4403-a139-a15aba539519","Type":"ContainerStarted","Data":"e1e7e3a976b808dd41a585b8863ac44b2832fd3910b73393dac05aa817d94d8c"} Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.306470 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6bwbs" event={"ID":"2f57e49a-0622-49c4-9c2c-1b744f01aa0a","Type":"ContainerStarted","Data":"f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0"} Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.323754 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" podStartSLOduration=2.6677875110000002 podStartE2EDuration="3.323722795s" podCreationTimestamp="2025-12-01 19:03:43 +0000 UTC" firstStartedPulling="2025-12-01 19:03:44.469768297 +0000 UTC m=+2038.491397556" lastFinishedPulling="2025-12-01 19:03:45.125703571 +0000 UTC m=+2039.147332840" observedRunningTime="2025-12-01 19:03:46.32170654 +0000 UTC m=+2040.343335799" watchObservedRunningTime="2025-12-01 19:03:46.323722795 +0000 UTC m=+2040.345352104" Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.543345 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16f3dbdb-7bea-419d-accc-89a7f288c977" path="/var/lib/kubelet/pods/16f3dbdb-7bea-419d-accc-89a7f288c977/volumes" Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.545631 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="350567c5-3a10-4bd8-b57c-f69aa1b581bc" path="/var/lib/kubelet/pods/350567c5-3a10-4bd8-b57c-f69aa1b581bc/volumes" Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.548310 4935 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="6318f430-920a-42fa-82fa-3543844bb06a" path="/var/lib/kubelet/pods/6318f430-920a-42fa-82fa-3543844bb06a/volumes" Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.552049 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="709a21ee-5142-492b-9b88-6f39cb92473d" path="/var/lib/kubelet/pods/709a21ee-5142-492b-9b88-6f39cb92473d/volumes" Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.555139 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b468f617-1ac4-4187-a32a-a35e87881f70" path="/var/lib/kubelet/pods/b468f617-1ac4-4187-a32a-a35e87881f70/volumes" Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.557300 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcb37c05-2790-49a1-ab92-5301bd8cb642" path="/var/lib/kubelet/pods/dcb37c05-2790-49a1-ab92-5301bd8cb642/volumes" Dec 01 19:03:46 crc kubenswrapper[4935]: I1201 19:03:46.559106 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4bc86e6-3397-42cb-9b81-0ca0db2821f4" path="/var/lib/kubelet/pods/f4bc86e6-3397-42cb-9b81-0ca0db2821f4/volumes" Dec 01 19:03:47 crc kubenswrapper[4935]: I1201 19:03:47.322967 4935 generic.go:334] "Generic (PLEG): container finished" podID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerID="f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0" exitCode=0 Dec 01 19:03:47 crc kubenswrapper[4935]: I1201 19:03:47.323029 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6bwbs" event={"ID":"2f57e49a-0622-49c4-9c2c-1b744f01aa0a","Type":"ContainerDied","Data":"f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0"} Dec 01 19:03:48 crc kubenswrapper[4935]: E1201 19:03:48.277600 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:03:48 crc kubenswrapper[4935]: E1201 19:03:48.277692 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:03:48 crc kubenswrapper[4935]: I1201 19:03:48.337763 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6bwbs" event={"ID":"2f57e49a-0622-49c4-9c2c-1b744f01aa0a","Type":"ContainerStarted","Data":"a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09"} Dec 01 19:03:48 crc kubenswrapper[4935]: I1201 19:03:48.364071 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6bwbs" podStartSLOduration=2.665098307 podStartE2EDuration="5.364050169s" podCreationTimestamp="2025-12-01 19:03:43 +0000 UTC" firstStartedPulling="2025-12-01 19:03:45.287034952 +0000 UTC m=+2039.308664211" lastFinishedPulling="2025-12-01 19:03:47.985986804 +0000 UTC m=+2042.007616073" observedRunningTime="2025-12-01 19:03:48.359096252 +0000 UTC m=+2042.380725511" watchObservedRunningTime="2025-12-01 19:03:48.364050169 +0000 UTC m=+2042.385679438" Dec 01 19:03:50 crc kubenswrapper[4935]: I1201 
19:03:50.050530 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-lmhvh"] Dec 01 19:03:50 crc kubenswrapper[4935]: I1201 19:03:50.062322 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-lmhvh"] Dec 01 19:03:50 crc kubenswrapper[4935]: I1201 19:03:50.529012 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e086eca-2e25-4bf7-9d95-807b71ab8945" path="/var/lib/kubelet/pods/7e086eca-2e25-4bf7-9d95-807b71ab8945/volumes" Dec 01 19:03:51 crc kubenswrapper[4935]: E1201 19:03:51.657569 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:03:54 crc kubenswrapper[4935]: I1201 19:03:54.293695 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:54 crc kubenswrapper[4935]: I1201 19:03:54.294232 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:54 crc kubenswrapper[4935]: I1201 19:03:54.345834 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:03:54 crc kubenswrapper[4935]: I1201 19:03:54.345918 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:03:54 crc kubenswrapper[4935]: I1201 19:03:54.362445 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:54 crc kubenswrapper[4935]: I1201 19:03:54.539370 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:54 crc kubenswrapper[4935]: I1201 19:03:54.644976 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6bwbs"] Dec 01 19:03:54 crc kubenswrapper[4935]: E1201 19:03:54.979833 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:03:56 crc kubenswrapper[4935]: I1201 19:03:56.474382 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6bwbs" podUID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerName="registry-server" containerID="cri-o://a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09" gracePeriod=2 Dec 01 19:03:56 crc kubenswrapper[4935]: I1201 19:03:56.984875 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.167759 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-catalog-content\") pod \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.168106 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-utilities\") pod \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.168152 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blnsl\" (UniqueName: \"kubernetes.io/projected/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-kube-api-access-blnsl\") pod \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\" (UID: \"2f57e49a-0622-49c4-9c2c-1b744f01aa0a\") " Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.168722 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-utilities" (OuterVolumeSpecName: "utilities") pod "2f57e49a-0622-49c4-9c2c-1b744f01aa0a" (UID: "2f57e49a-0622-49c4-9c2c-1b744f01aa0a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.175071 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-kube-api-access-blnsl" (OuterVolumeSpecName: "kube-api-access-blnsl") pod "2f57e49a-0622-49c4-9c2c-1b744f01aa0a" (UID: "2f57e49a-0622-49c4-9c2c-1b744f01aa0a"). InnerVolumeSpecName "kube-api-access-blnsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.188703 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f57e49a-0622-49c4-9c2c-1b744f01aa0a" (UID: "2f57e49a-0622-49c4-9c2c-1b744f01aa0a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.270707 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blnsl\" (UniqueName: \"kubernetes.io/projected/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-kube-api-access-blnsl\") on node \"crc\" DevicePath \"\"" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.270743 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.270754 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f57e49a-0622-49c4-9c2c-1b744f01aa0a-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.489771 4935 generic.go:334] "Generic (PLEG): container finished" podID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerID="a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09" exitCode=0 Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.489823 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6bwbs" event={"ID":"2f57e49a-0622-49c4-9c2c-1b744f01aa0a","Type":"ContainerDied","Data":"a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09"} Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.489858 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6bwbs" event={"ID":"2f57e49a-0622-49c4-9c2c-1b744f01aa0a","Type":"ContainerDied","Data":"db769bc775ad1c14bff9168a8f4f514b1a00f02c8e6414829b16b43f09e442e3"} Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.489883 4935 scope.go:117] "RemoveContainer" containerID="a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.489925 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6bwbs" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.522053 4935 scope.go:117] "RemoveContainer" containerID="f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.543531 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6bwbs"] Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.546612 4935 scope.go:117] "RemoveContainer" containerID="289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.557201 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6bwbs"] Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.603545 4935 scope.go:117] "RemoveContainer" containerID="a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09" Dec 01 19:03:57 crc kubenswrapper[4935]: E1201 19:03:57.603935 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09\": container with ID starting with a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09 not found: ID does not exist" containerID="a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.603962 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09"} err="failed to get container status \"a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09\": rpc error: code = NotFound desc = could not find container \"a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09\": container with ID starting with a2bbf9dec29cf444201915b2d3a693d2429e4d4fd8d16ebe5d268603a8f62d09 not found: ID does not exist" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.603984 4935 scope.go:117] "RemoveContainer" containerID="f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0" Dec 01 19:03:57 crc kubenswrapper[4935]: E1201 19:03:57.604171 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0\": container with ID starting with f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0 not found: ID does not exist" containerID="f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.604191 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0"} err="failed to get container status \"f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0\": rpc error: code = NotFound desc = could not find container \"f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0\": container with ID starting with f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0 not found: ID does not exist" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.604203 4935 scope.go:117] "RemoveContainer" containerID="289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb" Dec 01 19:03:57 crc kubenswrapper[4935]: E1201 19:03:57.604369 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb\": container with ID starting with 289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb not found: ID does not exist" containerID="289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb" Dec 01 19:03:57 crc kubenswrapper[4935]: I1201 19:03:57.604390 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb"} err="failed to get container status \"289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb\": rpc error: code = NotFound desc = could not find container \"289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb\": container with ID starting with 289ae5195d11eb8b37c2e81d549cffa65248c8660b6665ea73e9755cb5e26fcb not found: ID does not exist" Dec 01 19:03:58 crc kubenswrapper[4935]: I1201 19:03:58.520358 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" path="/var/lib/kubelet/pods/2f57e49a-0622-49c4-9c2c-1b744f01aa0a/volumes" Dec 01 19:04:01 crc kubenswrapper[4935]: E1201 19:04:01.982640 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.201631 4935 scope.go:117] "RemoveContainer" containerID="69c6a7720248d1520ab12ee6dabb92f2c396ef1accea9a378018a9d7edceae15" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.233802 4935 scope.go:117] "RemoveContainer" containerID="f282d3655ee39893ad55f6a461309e5eaa28847e561c6405b69dd3158e9614da" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.300718 4935 scope.go:117] "RemoveContainer" containerID="c2e6634569090e74b250f5efe6a6fce97c638e390af2caf1223214d92a0623d7" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.342314 4935 scope.go:117] "RemoveContainer" containerID="e2c86ee668bf34756612494549d233785400d4f2d44120edf6e90badbf6bd211" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.393371 4935 scope.go:117] "RemoveContainer" containerID="a7234599bcf74ceabf90054559d822bcb88cbb1cff93800a1fda6c77e26c35b7" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.423781 4935 scope.go:117] "RemoveContainer" containerID="67962251ed45178d83235949cf126a542e946bc8b9e8b45060209f3efe536cc3" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.466767 4935 scope.go:117] "RemoveContainer" containerID="725df424051b41ce28f3fbef2a77bc719d8b01bac141ca6a0bc2b53be02b8227" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.520407 4935 scope.go:117] "RemoveContainer" containerID="efe3173a5574d2a83c6a8fa1d574b2b291389255d75c6bcc74237ab50a019a67" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.544488 4935 scope.go:117] "RemoveContainer" containerID="9d11efdc57154e16c39f2cd0e26fc5f227c8bb2feddf55dc3b1b589543aef86f" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.584704 4935 scope.go:117] "RemoveContainer" containerID="af689a59f2ed63212cc38e7e2fcf05d4aec8726fc26015b05f07bd47ad17db54" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.608304 4935 scope.go:117] "RemoveContainer" 
containerID="5a22f1e43d5ad5ab08ffe12f8af805c0f3f992c67e874b8bfa19aa7e888d9153" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.632871 4935 scope.go:117] "RemoveContainer" containerID="6840baa43aea9b4476c18684f33dd192e041f745869b7c2a5ac6d5a2f32da409" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.686319 4935 scope.go:117] "RemoveContainer" containerID="9388b77900307818a2bd9915995d6fc0844b9768b8a8bf5df705398d6333b1d7" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.721650 4935 scope.go:117] "RemoveContainer" containerID="b42090b8cfe8b7c9bd166b7e25729becf52269876038295b5c241f5d63ac770a" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.755409 4935 scope.go:117] "RemoveContainer" containerID="1141d1b2daa6d355f7be4666cd3adf07d9676e697c3a9274d42b6f53e9cce9e7" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.780257 4935 scope.go:117] "RemoveContainer" containerID="0afd684f60748aad72733a1ddbf21870a08e431bf7883e8157dfdc3009c562f7" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.815467 4935 scope.go:117] "RemoveContainer" containerID="53e71c09878f4a9427e1f81f62e70de9cc6930534633948a164260e3fd210689" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.838899 4935 scope.go:117] "RemoveContainer" containerID="cfd9426175cec259574b93c4f2860b48e03bc102735a7964cbc86e06d60ffac5" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.858622 4935 scope.go:117] "RemoveContainer" containerID="a6d384affd115922c2418f4a1f7a10eb14e06b018ba9f5c2c5a996b2e146a854" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.913336 4935 scope.go:117] "RemoveContainer" containerID="d22b53fa3060ca41bf78d06d4affc1f754185a1f8e42cc29ab1171ff35674ccd" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.935825 4935 scope.go:117] "RemoveContainer" containerID="41edcf9677c229a49d6fb8ea1319b7a64c47f6a81ce11a865446dd28ae2f849d" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.956812 4935 scope.go:117] "RemoveContainer" containerID="a138dadec0e8590d728166b9f6e4dd19024f575a8c29a4316d4f3f0e93c36cdf" Dec 01 19:04:07 crc kubenswrapper[4935]: I1201 19:04:07.979944 4935 scope.go:117] "RemoveContainer" containerID="3b5e7c06e3947b1239cbc24ee7388e566a5e74282ad5aa98e0bf1c0d6d02ba16" Dec 01 19:04:09 crc kubenswrapper[4935]: E1201 19:04:09.907653 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:04:12 crc kubenswrapper[4935]: E1201 19:04:12.035144 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:04:22 crc kubenswrapper[4935]: E1201 19:04:22.344605 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:04:24 crc kubenswrapper[4935]: I1201 19:04:24.346035 4935 patch_prober.go:28] 
interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:04:24 crc kubenswrapper[4935]: I1201 19:04:24.346451 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:04:24 crc kubenswrapper[4935]: I1201 19:04:24.346515 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:04:24 crc kubenswrapper[4935]: I1201 19:04:24.347854 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"65f55d9794ae9f16c4bcf6f9b1370c7d8f50ea2498f356417eb8e5a61df9d9bb"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:04:24 crc kubenswrapper[4935]: I1201 19:04:24.347962 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://65f55d9794ae9f16c4bcf6f9b1370c7d8f50ea2498f356417eb8e5a61df9d9bb" gracePeriod=600 Dec 01 19:04:24 crc kubenswrapper[4935]: E1201 19:04:24.675396 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:04:24 crc kubenswrapper[4935]: I1201 19:04:24.913572 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="65f55d9794ae9f16c4bcf6f9b1370c7d8f50ea2498f356417eb8e5a61df9d9bb" exitCode=0 Dec 01 19:04:24 crc kubenswrapper[4935]: I1201 19:04:24.913613 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"65f55d9794ae9f16c4bcf6f9b1370c7d8f50ea2498f356417eb8e5a61df9d9bb"} Dec 01 19:04:24 crc kubenswrapper[4935]: I1201 19:04:24.913646 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4"} Dec 01 19:04:24 crc kubenswrapper[4935]: I1201 19:04:24.913661 4935 scope.go:117] "RemoveContainer" containerID="d15e933891212c605ea22680c68a2511d0612d014970802c29f8fdb65dc72842" Dec 01 19:04:25 crc kubenswrapper[4935]: I1201 19:04:25.056012 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-d4ntp"] Dec 01 19:04:25 crc kubenswrapper[4935]: I1201 19:04:25.071893 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-8mddp"] Dec 01 19:04:25 crc 
kubenswrapper[4935]: I1201 19:04:25.092203 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-8mddp"] Dec 01 19:04:25 crc kubenswrapper[4935]: I1201 19:04:25.108835 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-d4ntp"] Dec 01 19:04:26 crc kubenswrapper[4935]: I1201 19:04:26.547324 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10f9fc1e-d72b-4123-b805-82a03d56c439" path="/var/lib/kubelet/pods/10f9fc1e-d72b-4123-b805-82a03d56c439/volumes" Dec 01 19:04:26 crc kubenswrapper[4935]: I1201 19:04:26.550691 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a7ac48a-042f-4d13-a9ac-d8449e732bbf" path="/var/lib/kubelet/pods/1a7ac48a-042f-4d13-a9ac-d8449e732bbf/volumes" Dec 01 19:04:32 crc kubenswrapper[4935]: I1201 19:04:32.059722 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-bwph6"] Dec 01 19:04:32 crc kubenswrapper[4935]: I1201 19:04:32.079416 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-bwph6"] Dec 01 19:04:32 crc kubenswrapper[4935]: I1201 19:04:32.520004 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae58d3fe-1a16-467c-b5c9-9522cb473a03" path="/var/lib/kubelet/pods/ae58d3fe-1a16-467c-b5c9-9522cb473a03/volumes" Dec 01 19:04:32 crc kubenswrapper[4935]: E1201 19:04:32.670035 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:04:37 crc kubenswrapper[4935]: I1201 19:04:37.051757 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-bb44q"] Dec 01 19:04:37 crc kubenswrapper[4935]: I1201 19:04:37.065565 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-bb44q"] Dec 01 19:04:38 crc kubenswrapper[4935]: I1201 19:04:38.531566 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56e181ed-d603-4a53-aa57-222d888ce5a2" path="/var/lib/kubelet/pods/56e181ed-d603-4a53-aa57-222d888ce5a2/volumes" Dec 01 19:04:39 crc kubenswrapper[4935]: E1201 19:04:39.952665 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:04:42 crc kubenswrapper[4935]: E1201 19:04:42.711117 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57e49a_0622_49c4_9c2c_1b744f01aa0a.slice/crio-f8e18f4fa57fccfbe4f029a4a5a511be7f872f511f25ad759a661676ba81e8f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:04:53 crc kubenswrapper[4935]: I1201 19:04:53.033825 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-gk8gw"] Dec 01 19:04:53 crc kubenswrapper[4935]: I1201 19:04:53.049461 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-gk8gw"] Dec 01 19:04:54 crc kubenswrapper[4935]: I1201 19:04:54.526902 4935 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e60c370-8ffd-4b97-a829-176da28bf116" path="/var/lib/kubelet/pods/1e60c370-8ffd-4b97-a829-176da28bf116/volumes" Dec 01 19:04:55 crc kubenswrapper[4935]: I1201 19:04:55.047867 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-c8448"] Dec 01 19:04:55 crc kubenswrapper[4935]: I1201 19:04:55.061999 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-c8448"] Dec 01 19:04:56 crc kubenswrapper[4935]: I1201 19:04:56.543066 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8f827a2-a529-4371-8c82-c06377b2c9f2" path="/var/lib/kubelet/pods/b8f827a2-a529-4371-8c82-c06377b2c9f2/volumes" Dec 01 19:05:08 crc kubenswrapper[4935]: I1201 19:05:08.571675 4935 scope.go:117] "RemoveContainer" containerID="6fe0e44ed0de290991d50c077f3a0f53acd6c7fa8f04a427206ac15bc2a4307a" Dec 01 19:05:08 crc kubenswrapper[4935]: I1201 19:05:08.618020 4935 scope.go:117] "RemoveContainer" containerID="ca33ca7b137a666f9040e8783881de9988ea8fe540509c7029539cdc971f5f57" Dec 01 19:05:08 crc kubenswrapper[4935]: I1201 19:05:08.710436 4935 scope.go:117] "RemoveContainer" containerID="03a1a6c961272e9ce1ff5b99476ddd33cad510815560c954160a1e690e78533b" Dec 01 19:05:08 crc kubenswrapper[4935]: I1201 19:05:08.758882 4935 scope.go:117] "RemoveContainer" containerID="3e25ffdbe38bd4a9b28f3317153dffd84e54ae26581824400f54ff9f78ac7e4d" Dec 01 19:05:08 crc kubenswrapper[4935]: I1201 19:05:08.812190 4935 scope.go:117] "RemoveContainer" containerID="8e07534970642545d9945301261beac2ca2b1894d54971fca61311cdd0d03f4a" Dec 01 19:05:08 crc kubenswrapper[4935]: I1201 19:05:08.857412 4935 scope.go:117] "RemoveContainer" containerID="485d989eb2289e695767dc43c6ac3f906250cd329a311eceabf214a7271ea533" Dec 01 19:05:41 crc kubenswrapper[4935]: I1201 19:05:41.053959 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-5chlp"] Dec 01 19:05:41 crc kubenswrapper[4935]: I1201 19:05:41.069005 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-5chlp"] Dec 01 19:05:42 crc kubenswrapper[4935]: I1201 19:05:42.530915 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65f74b49-9135-49c6-af13-5107b45a5dd3" path="/var/lib/kubelet/pods/65f74b49-9135-49c6-af13-5107b45a5dd3/volumes" Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.051285 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5142-account-create-update-nhlr5"] Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.063661 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-qcl98"] Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.081771 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-gnbwb"] Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.090448 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d2fe-account-create-update-9n2k8"] Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.099335 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-d2fe-account-create-update-9n2k8"] Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.107032 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-7f6d-account-create-update-gtx98"] Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.114980 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-api-5142-account-create-update-nhlr5"] Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.124021 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-qcl98"] Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.134368 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-gnbwb"] Dec 01 19:05:45 crc kubenswrapper[4935]: I1201 19:05:45.143471 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-7f6d-account-create-update-gtx98"] Dec 01 19:05:46 crc kubenswrapper[4935]: I1201 19:05:46.567506 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="240c55dd-0ae3-4867-9a2b-1608dad2c7c3" path="/var/lib/kubelet/pods/240c55dd-0ae3-4867-9a2b-1608dad2c7c3/volumes" Dec 01 19:05:46 crc kubenswrapper[4935]: I1201 19:05:46.587107 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8ca764e-f73d-424c-ac48-60af2d5729d2" path="/var/lib/kubelet/pods/a8ca764e-f73d-424c-ac48-60af2d5729d2/volumes" Dec 01 19:05:46 crc kubenswrapper[4935]: I1201 19:05:46.587894 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0b1e7a8-88dd-4890-8971-eec026c2d209" path="/var/lib/kubelet/pods/e0b1e7a8-88dd-4890-8971-eec026c2d209/volumes" Dec 01 19:05:46 crc kubenswrapper[4935]: I1201 19:05:46.607706 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e155f755-b0bf-4344-a18d-4b54b783c589" path="/var/lib/kubelet/pods/e155f755-b0bf-4344-a18d-4b54b783c589/volumes" Dec 01 19:05:46 crc kubenswrapper[4935]: I1201 19:05:46.610408 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7429614-81d2-4c30-a261-321dd8d020dc" path="/var/lib/kubelet/pods/f7429614-81d2-4c30-a261-321dd8d020dc/volumes" Dec 01 19:05:55 crc kubenswrapper[4935]: I1201 19:05:55.201083 4935 generic.go:334] "Generic (PLEG): container finished" podID="763abe02-1bdf-4403-a139-a15aba539519" containerID="e1e7e3a976b808dd41a585b8863ac44b2832fd3910b73393dac05aa817d94d8c" exitCode=0 Dec 01 19:05:55 crc kubenswrapper[4935]: I1201 19:05:55.201189 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" event={"ID":"763abe02-1bdf-4403-a139-a15aba539519","Type":"ContainerDied","Data":"e1e7e3a976b808dd41a585b8863ac44b2832fd3910b73393dac05aa817d94d8c"} Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.832316 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qptw6"] Dec 01 19:05:56 crc kubenswrapper[4935]: E1201 19:05:56.833269 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerName="extract-utilities" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.833282 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerName="extract-utilities" Dec 01 19:05:56 crc kubenswrapper[4935]: E1201 19:05:56.833300 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerName="registry-server" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.833306 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerName="registry-server" Dec 01 19:05:56 crc kubenswrapper[4935]: E1201 19:05:56.833316 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" 
containerName="extract-content" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.833322 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerName="extract-content" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.833559 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f57e49a-0622-49c4-9c2c-1b744f01aa0a" containerName="registry-server" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.835093 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.838737 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.856683 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qptw6"] Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.928055 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-ssh-key\") pod \"763abe02-1bdf-4403-a139-a15aba539519\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.928206 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-inventory\") pod \"763abe02-1bdf-4403-a139-a15aba539519\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.928373 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2v4dg\" (UniqueName: \"kubernetes.io/projected/763abe02-1bdf-4403-a139-a15aba539519-kube-api-access-2v4dg\") pod \"763abe02-1bdf-4403-a139-a15aba539519\" (UID: \"763abe02-1bdf-4403-a139-a15aba539519\") " Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.928693 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-catalog-content\") pod \"redhat-operators-qptw6\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.928823 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-utilities\") pod \"redhat-operators-qptw6\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.928934 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml4k9\" (UniqueName: \"kubernetes.io/projected/c29cfcd4-b473-4a8c-aeee-1a005b733524-kube-api-access-ml4k9\") pod \"redhat-operators-qptw6\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.936440 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/763abe02-1bdf-4403-a139-a15aba539519-kube-api-access-2v4dg" (OuterVolumeSpecName: "kube-api-access-2v4dg") 
pod "763abe02-1bdf-4403-a139-a15aba539519" (UID: "763abe02-1bdf-4403-a139-a15aba539519"). InnerVolumeSpecName "kube-api-access-2v4dg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.962698 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "763abe02-1bdf-4403-a139-a15aba539519" (UID: "763abe02-1bdf-4403-a139-a15aba539519"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:05:56 crc kubenswrapper[4935]: I1201 19:05:56.975701 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-inventory" (OuterVolumeSpecName: "inventory") pod "763abe02-1bdf-4403-a139-a15aba539519" (UID: "763abe02-1bdf-4403-a139-a15aba539519"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.031211 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-utilities\") pod \"redhat-operators-qptw6\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.031599 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml4k9\" (UniqueName: \"kubernetes.io/projected/c29cfcd4-b473-4a8c-aeee-1a005b733524-kube-api-access-ml4k9\") pod \"redhat-operators-qptw6\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.031705 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-catalog-content\") pod \"redhat-operators-qptw6\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.031647 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-utilities\") pod \"redhat-operators-qptw6\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.032007 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.032080 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/763abe02-1bdf-4403-a139-a15aba539519-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.032137 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2v4dg\" (UniqueName: \"kubernetes.io/projected/763abe02-1bdf-4403-a139-a15aba539519-kube-api-access-2v4dg\") on node \"crc\" DevicePath \"\"" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.032178 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-catalog-content\") pod \"redhat-operators-qptw6\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.049394 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml4k9\" (UniqueName: \"kubernetes.io/projected/c29cfcd4-b473-4a8c-aeee-1a005b733524-kube-api-access-ml4k9\") pod \"redhat-operators-qptw6\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.151400 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.231391 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" event={"ID":"763abe02-1bdf-4403-a139-a15aba539519","Type":"ContainerDied","Data":"728d5994bebe0b9c59ca41fd282d563af12ffbb07f72d9da4b258fb732595356"} Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.231428 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="728d5994bebe0b9c59ca41fd282d563af12ffbb07f72d9da4b258fb732595356" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.231480 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.348079 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn"] Dec 01 19:05:57 crc kubenswrapper[4935]: E1201 19:05:57.348940 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="763abe02-1bdf-4403-a139-a15aba539519" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.348971 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="763abe02-1bdf-4403-a139-a15aba539519" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.349243 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="763abe02-1bdf-4403-a139-a15aba539519" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.350473 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.355585 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.355931 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.356004 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.356322 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.357495 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn"] Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.441747 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnwjl\" (UniqueName: \"kubernetes.io/projected/0048b1eb-735d-437d-b7bd-ad2814905c56-kube-api-access-nnwjl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.442016 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.442527 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.544317 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.544491 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.544538 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnwjl\" (UniqueName: \"kubernetes.io/projected/0048b1eb-735d-437d-b7bd-ad2814905c56-kube-api-access-nnwjl\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.550251 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.550733 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.566133 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnwjl\" (UniqueName: \"kubernetes.io/projected/0048b1eb-735d-437d-b7bd-ad2814905c56-kube-api-access-nnwjl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.669109 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:05:57 crc kubenswrapper[4935]: I1201 19:05:57.681475 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qptw6"] Dec 01 19:05:58 crc kubenswrapper[4935]: I1201 19:05:58.249938 4935 generic.go:334] "Generic (PLEG): container finished" podID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerID="03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396" exitCode=0 Dec 01 19:05:58 crc kubenswrapper[4935]: I1201 19:05:58.249995 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qptw6" event={"ID":"c29cfcd4-b473-4a8c-aeee-1a005b733524","Type":"ContainerDied","Data":"03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396"} Dec 01 19:05:58 crc kubenswrapper[4935]: I1201 19:05:58.250393 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qptw6" event={"ID":"c29cfcd4-b473-4a8c-aeee-1a005b733524","Type":"ContainerStarted","Data":"21a158c07be1d1c4318a806192aec2cca457c94a6e1de51420f5f4d2f9622fb6"} Dec 01 19:05:58 crc kubenswrapper[4935]: I1201 19:05:58.315141 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn"] Dec 01 19:05:59 crc kubenswrapper[4935]: I1201 19:05:59.259953 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" event={"ID":"0048b1eb-735d-437d-b7bd-ad2814905c56","Type":"ContainerStarted","Data":"79d3d06b36e281957148bd1c6b2bbd2e0f2407e2e03f0f833bea73c31c1f4579"} Dec 01 19:05:59 crc kubenswrapper[4935]: I1201 19:05:59.260601 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" 
event={"ID":"0048b1eb-735d-437d-b7bd-ad2814905c56","Type":"ContainerStarted","Data":"bd8b26afd4fd2de9be8d8e1eefd7cdbbca2f57023fd569d528d3eb540ba68aaf"} Dec 01 19:05:59 crc kubenswrapper[4935]: I1201 19:05:59.279176 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" podStartSLOduration=1.7849254559999999 podStartE2EDuration="2.279139371s" podCreationTimestamp="2025-12-01 19:05:57 +0000 UTC" firstStartedPulling="2025-12-01 19:05:58.320241763 +0000 UTC m=+2172.341871022" lastFinishedPulling="2025-12-01 19:05:58.814455678 +0000 UTC m=+2172.836084937" observedRunningTime="2025-12-01 19:05:59.273614726 +0000 UTC m=+2173.295243985" watchObservedRunningTime="2025-12-01 19:05:59.279139371 +0000 UTC m=+2173.300768630" Dec 01 19:06:00 crc kubenswrapper[4935]: I1201 19:06:00.279347 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qptw6" event={"ID":"c29cfcd4-b473-4a8c-aeee-1a005b733524","Type":"ContainerStarted","Data":"2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4"} Dec 01 19:06:04 crc kubenswrapper[4935]: I1201 19:06:04.331425 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qptw6" event={"ID":"c29cfcd4-b473-4a8c-aeee-1a005b733524","Type":"ContainerDied","Data":"2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4"} Dec 01 19:06:04 crc kubenswrapper[4935]: I1201 19:06:04.331427 4935 generic.go:334] "Generic (PLEG): container finished" podID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerID="2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4" exitCode=0 Dec 01 19:06:06 crc kubenswrapper[4935]: I1201 19:06:06.360532 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qptw6" event={"ID":"c29cfcd4-b473-4a8c-aeee-1a005b733524","Type":"ContainerStarted","Data":"f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794"} Dec 01 19:06:06 crc kubenswrapper[4935]: I1201 19:06:06.386548 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qptw6" podStartSLOduration=3.048678109 podStartE2EDuration="10.38652614s" podCreationTimestamp="2025-12-01 19:05:56 +0000 UTC" firstStartedPulling="2025-12-01 19:05:58.252053328 +0000 UTC m=+2172.273682587" lastFinishedPulling="2025-12-01 19:06:05.589901329 +0000 UTC m=+2179.611530618" observedRunningTime="2025-12-01 19:06:06.377212996 +0000 UTC m=+2180.398842255" watchObservedRunningTime="2025-12-01 19:06:06.38652614 +0000 UTC m=+2180.408155419" Dec 01 19:06:07 crc kubenswrapper[4935]: I1201 19:06:07.152477 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:06:07 crc kubenswrapper[4935]: I1201 19:06:07.154020 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:06:08 crc kubenswrapper[4935]: I1201 19:06:08.213418 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qptw6" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerName="registry-server" probeResult="failure" output=< Dec 01 19:06:08 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 19:06:08 crc kubenswrapper[4935]: > Dec 01 19:06:09 crc kubenswrapper[4935]: I1201 19:06:09.054557 4935 scope.go:117] "RemoveContainer" 
containerID="28e492e821f79da37953b76a30fe28f93a2c7d6a388d5d606318348c8cc2985b" Dec 01 19:06:09 crc kubenswrapper[4935]: I1201 19:06:09.092742 4935 scope.go:117] "RemoveContainer" containerID="57e00f233832965cad5cf29aef91055ecec6ba0271a1183acf5dada0e6b57f4d" Dec 01 19:06:09 crc kubenswrapper[4935]: I1201 19:06:09.150307 4935 scope.go:117] "RemoveContainer" containerID="fd8359a5f4ff01b7307717f6d351ecd0b8e40c46e58d3d8f08ce137d5150f135" Dec 01 19:06:09 crc kubenswrapper[4935]: I1201 19:06:09.215687 4935 scope.go:117] "RemoveContainer" containerID="c4c36780c4c88b1e623dd759bd59ee9330fa5edc20a76956b817b57ea390c9ec" Dec 01 19:06:09 crc kubenswrapper[4935]: I1201 19:06:09.291264 4935 scope.go:117] "RemoveContainer" containerID="e2fc52c6df4b1b402806969a4bb97c7f47b4497942debe23465a2d57cb4851b8" Dec 01 19:06:09 crc kubenswrapper[4935]: I1201 19:06:09.346116 4935 scope.go:117] "RemoveContainer" containerID="0ae9dfbd52e4ab66ee55d0f32d02ecec7530ed96c084ebf800933c5c64fe7fdc" Dec 01 19:06:16 crc kubenswrapper[4935]: I1201 19:06:16.067676 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr876"] Dec 01 19:06:16 crc kubenswrapper[4935]: I1201 19:06:16.083452 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr876"] Dec 01 19:06:16 crc kubenswrapper[4935]: I1201 19:06:16.531240 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b8ef570-0e9e-426b-a8dc-dadd94b78be3" path="/var/lib/kubelet/pods/4b8ef570-0e9e-426b-a8dc-dadd94b78be3/volumes" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.119755 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zrf6s"] Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.125373 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.140913 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zrf6s"] Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.236006 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.246158 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-utilities\") pod \"community-operators-zrf6s\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.246227 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7qdm\" (UniqueName: \"kubernetes.io/projected/b977894a-ef33-4058-8342-119d22a7a4fd-kube-api-access-b7qdm\") pod \"community-operators-zrf6s\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.246261 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-catalog-content\") pod \"community-operators-zrf6s\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.290027 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.348178 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-utilities\") pod \"community-operators-zrf6s\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.348271 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7qdm\" (UniqueName: \"kubernetes.io/projected/b977894a-ef33-4058-8342-119d22a7a4fd-kube-api-access-b7qdm\") pod \"community-operators-zrf6s\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.348310 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-catalog-content\") pod \"community-operators-zrf6s\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.349051 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-utilities\") pod \"community-operators-zrf6s\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.349798 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-catalog-content\") pod \"community-operators-zrf6s\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.380138 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7qdm\" (UniqueName: \"kubernetes.io/projected/b977894a-ef33-4058-8342-119d22a7a4fd-kube-api-access-b7qdm\") pod \"community-operators-zrf6s\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:17 crc kubenswrapper[4935]: I1201 19:06:17.482442 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:18 crc kubenswrapper[4935]: I1201 19:06:18.310400 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zrf6s"] Dec 01 19:06:18 crc kubenswrapper[4935]: I1201 19:06:18.526556 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zrf6s" event={"ID":"b977894a-ef33-4058-8342-119d22a7a4fd","Type":"ContainerStarted","Data":"666c71145f486b891280ae1d23630b27e0427cbc2c6d466fd41cd31e54f2bfbd"} Dec 01 19:06:19 crc kubenswrapper[4935]: I1201 19:06:19.538569 4935 generic.go:334] "Generic (PLEG): container finished" podID="b977894a-ef33-4058-8342-119d22a7a4fd" containerID="86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1" exitCode=0 Dec 01 19:06:19 crc kubenswrapper[4935]: I1201 19:06:19.538894 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zrf6s" event={"ID":"b977894a-ef33-4058-8342-119d22a7a4fd","Type":"ContainerDied","Data":"86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1"} Dec 01 19:06:19 crc kubenswrapper[4935]: I1201 19:06:19.671088 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qptw6"] Dec 01 19:06:19 crc kubenswrapper[4935]: I1201 19:06:19.671505 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qptw6" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerName="registry-server" containerID="cri-o://f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794" gracePeriod=2 Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.311214 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.431540 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-utilities\") pod \"c29cfcd4-b473-4a8c-aeee-1a005b733524\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.431853 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-catalog-content\") pod \"c29cfcd4-b473-4a8c-aeee-1a005b733524\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.432057 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml4k9\" (UniqueName: \"kubernetes.io/projected/c29cfcd4-b473-4a8c-aeee-1a005b733524-kube-api-access-ml4k9\") pod \"c29cfcd4-b473-4a8c-aeee-1a005b733524\" (UID: \"c29cfcd4-b473-4a8c-aeee-1a005b733524\") " Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.432213 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-utilities" (OuterVolumeSpecName: "utilities") pod "c29cfcd4-b473-4a8c-aeee-1a005b733524" (UID: "c29cfcd4-b473-4a8c-aeee-1a005b733524"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.432776 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.443173 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c29cfcd4-b473-4a8c-aeee-1a005b733524-kube-api-access-ml4k9" (OuterVolumeSpecName: "kube-api-access-ml4k9") pod "c29cfcd4-b473-4a8c-aeee-1a005b733524" (UID: "c29cfcd4-b473-4a8c-aeee-1a005b733524"). InnerVolumeSpecName "kube-api-access-ml4k9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.534386 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml4k9\" (UniqueName: \"kubernetes.io/projected/c29cfcd4-b473-4a8c-aeee-1a005b733524-kube-api-access-ml4k9\") on node \"crc\" DevicePath \"\"" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.554407 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zrf6s" event={"ID":"b977894a-ef33-4058-8342-119d22a7a4fd","Type":"ContainerStarted","Data":"a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a"} Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.559251 4935 generic.go:334] "Generic (PLEG): container finished" podID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerID="f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794" exitCode=0 Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.559354 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qptw6" event={"ID":"c29cfcd4-b473-4a8c-aeee-1a005b733524","Type":"ContainerDied","Data":"f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794"} Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.559444 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qptw6" event={"ID":"c29cfcd4-b473-4a8c-aeee-1a005b733524","Type":"ContainerDied","Data":"21a158c07be1d1c4318a806192aec2cca457c94a6e1de51420f5f4d2f9622fb6"} Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.559391 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qptw6" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.559463 4935 scope.go:117] "RemoveContainer" containerID="f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.583325 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c29cfcd4-b473-4a8c-aeee-1a005b733524" (UID: "c29cfcd4-b473-4a8c-aeee-1a005b733524"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.609233 4935 scope.go:117] "RemoveContainer" containerID="2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.637871 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c29cfcd4-b473-4a8c-aeee-1a005b733524-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.640613 4935 scope.go:117] "RemoveContainer" containerID="03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.670246 4935 scope.go:117] "RemoveContainer" containerID="f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794" Dec 01 19:06:20 crc kubenswrapper[4935]: E1201 19:06:20.670760 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794\": container with ID starting with f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794 not found: ID does not exist" containerID="f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.670799 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794"} err="failed to get container status \"f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794\": rpc error: code = NotFound desc = could not find container \"f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794\": container with ID starting with f3db23aaed67fee3a7a725cb6a03b249f2b832610c9b2a9c817bffaca76e9794 not found: ID does not exist" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.670829 4935 scope.go:117] "RemoveContainer" containerID="2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4" Dec 01 19:06:20 crc kubenswrapper[4935]: E1201 19:06:20.671352 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4\": container with ID starting with 2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4 not found: ID does not exist" containerID="2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.671374 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4"} err="failed to get container status \"2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4\": rpc error: code = NotFound desc = could not find container \"2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4\": container with ID starting with 2f46bcac1da623497e5eff63380447a1d49b6f9b0315ad277ab8224da8a424c4 not found: ID does not exist" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.671391 4935 scope.go:117] "RemoveContainer" containerID="03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396" Dec 01 19:06:20 crc kubenswrapper[4935]: E1201 19:06:20.671676 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396\": container with ID starting with 03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396 not found: ID does not exist" containerID="03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.671726 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396"} err="failed to get container status \"03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396\": rpc error: code = NotFound desc = could not find container \"03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396\": container with ID starting with 03707e7d4a8798ac3a165bd5428ddbe896026a263aebb402509b3d3424ac9396 not found: ID does not exist" Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.922368 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qptw6"] Dec 01 19:06:20 crc kubenswrapper[4935]: I1201 19:06:20.941968 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qptw6"] Dec 01 19:06:21 crc kubenswrapper[4935]: I1201 19:06:21.583866 4935 generic.go:334] "Generic (PLEG): container finished" podID="b977894a-ef33-4058-8342-119d22a7a4fd" containerID="a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a" exitCode=0 Dec 01 19:06:21 crc kubenswrapper[4935]: I1201 19:06:21.583961 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zrf6s" event={"ID":"b977894a-ef33-4058-8342-119d22a7a4fd","Type":"ContainerDied","Data":"a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a"} Dec 01 19:06:22 crc kubenswrapper[4935]: I1201 19:06:22.523279 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" path="/var/lib/kubelet/pods/c29cfcd4-b473-4a8c-aeee-1a005b733524/volumes" Dec 01 19:06:22 crc kubenswrapper[4935]: I1201 19:06:22.601291 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zrf6s" event={"ID":"b977894a-ef33-4058-8342-119d22a7a4fd","Type":"ContainerStarted","Data":"3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546"} Dec 01 19:06:22 crc kubenswrapper[4935]: I1201 19:06:22.648818 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zrf6s" podStartSLOduration=3.011020747 podStartE2EDuration="5.648794412s" podCreationTimestamp="2025-12-01 19:06:17 +0000 UTC" firstStartedPulling="2025-12-01 19:06:19.541975906 +0000 UTC m=+2193.563605205" lastFinishedPulling="2025-12-01 19:06:22.179749611 +0000 UTC m=+2196.201378870" observedRunningTime="2025-12-01 19:06:22.617880905 +0000 UTC m=+2196.639510174" watchObservedRunningTime="2025-12-01 19:06:22.648794412 +0000 UTC m=+2196.670423671" Dec 01 19:06:24 crc kubenswrapper[4935]: I1201 19:06:24.345692 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:06:24 crc kubenswrapper[4935]: I1201 19:06:24.346094 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" 
podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:06:27 crc kubenswrapper[4935]: I1201 19:06:27.483208 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:27 crc kubenswrapper[4935]: I1201 19:06:27.485507 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:27 crc kubenswrapper[4935]: I1201 19:06:27.556056 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:27 crc kubenswrapper[4935]: I1201 19:06:27.751538 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:28 crc kubenswrapper[4935]: I1201 19:06:28.024375 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zrf6s"] Dec 01 19:06:29 crc kubenswrapper[4935]: I1201 19:06:29.693180 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zrf6s" podUID="b977894a-ef33-4058-8342-119d22a7a4fd" containerName="registry-server" containerID="cri-o://3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546" gracePeriod=2 Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.271544 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.324199 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-catalog-content\") pod \"b977894a-ef33-4058-8342-119d22a7a4fd\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.324675 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-utilities\") pod \"b977894a-ef33-4058-8342-119d22a7a4fd\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.324742 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7qdm\" (UniqueName: \"kubernetes.io/projected/b977894a-ef33-4058-8342-119d22a7a4fd-kube-api-access-b7qdm\") pod \"b977894a-ef33-4058-8342-119d22a7a4fd\" (UID: \"b977894a-ef33-4058-8342-119d22a7a4fd\") " Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.326069 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-utilities" (OuterVolumeSpecName: "utilities") pod "b977894a-ef33-4058-8342-119d22a7a4fd" (UID: "b977894a-ef33-4058-8342-119d22a7a4fd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.338643 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b977894a-ef33-4058-8342-119d22a7a4fd-kube-api-access-b7qdm" (OuterVolumeSpecName: "kube-api-access-b7qdm") pod "b977894a-ef33-4058-8342-119d22a7a4fd" (UID: "b977894a-ef33-4058-8342-119d22a7a4fd"). InnerVolumeSpecName "kube-api-access-b7qdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.378541 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b977894a-ef33-4058-8342-119d22a7a4fd" (UID: "b977894a-ef33-4058-8342-119d22a7a4fd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.428234 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.428284 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b977894a-ef33-4058-8342-119d22a7a4fd-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.428302 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7qdm\" (UniqueName: \"kubernetes.io/projected/b977894a-ef33-4058-8342-119d22a7a4fd-kube-api-access-b7qdm\") on node \"crc\" DevicePath \"\"" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.707018 4935 generic.go:334] "Generic (PLEG): container finished" podID="b977894a-ef33-4058-8342-119d22a7a4fd" containerID="3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546" exitCode=0 Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.707070 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zrf6s" event={"ID":"b977894a-ef33-4058-8342-119d22a7a4fd","Type":"ContainerDied","Data":"3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546"} Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.707168 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zrf6s" event={"ID":"b977894a-ef33-4058-8342-119d22a7a4fd","Type":"ContainerDied","Data":"666c71145f486b891280ae1d23630b27e0427cbc2c6d466fd41cd31e54f2bfbd"} Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.707191 4935 scope.go:117] "RemoveContainer" containerID="3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.707196 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zrf6s" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.739538 4935 scope.go:117] "RemoveContainer" containerID="a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.742891 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zrf6s"] Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.766550 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zrf6s"] Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.772914 4935 scope.go:117] "RemoveContainer" containerID="86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.843816 4935 scope.go:117] "RemoveContainer" containerID="3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546" Dec 01 19:06:30 crc kubenswrapper[4935]: E1201 19:06:30.844483 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546\": container with ID starting with 3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546 not found: ID does not exist" containerID="3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.844549 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546"} err="failed to get container status \"3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546\": rpc error: code = NotFound desc = could not find container \"3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546\": container with ID starting with 3a596a5824f5f80903b1ae80dbc538ba5bfe21fc875242362f99f27f33ba5546 not found: ID does not exist" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.844592 4935 scope.go:117] "RemoveContainer" containerID="a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a" Dec 01 19:06:30 crc kubenswrapper[4935]: E1201 19:06:30.845335 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a\": container with ID starting with a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a not found: ID does not exist" containerID="a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.845378 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a"} err="failed to get container status \"a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a\": rpc error: code = NotFound desc = could not find container \"a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a\": container with ID starting with a1478c5681196b5553d22089ef7332e6c9f8992d40dcf0d827fdb67be99d2a8a not found: ID does not exist" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.845409 4935 scope.go:117] "RemoveContainer" containerID="86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1" Dec 01 19:06:30 crc kubenswrapper[4935]: E1201 19:06:30.845684 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1\": container with ID starting with 86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1 not found: ID does not exist" containerID="86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1" Dec 01 19:06:30 crc kubenswrapper[4935]: I1201 19:06:30.845724 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1"} err="failed to get container status \"86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1\": rpc error: code = NotFound desc = could not find container \"86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1\": container with ID starting with 86e739efc1012fce8aed1797dfc8e226e0d51786bb8819700c065a47d4be2ab1 not found: ID does not exist" Dec 01 19:06:32 crc kubenswrapper[4935]: I1201 19:06:32.534349 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b977894a-ef33-4058-8342-119d22a7a4fd" path="/var/lib/kubelet/pods/b977894a-ef33-4058-8342-119d22a7a4fd/volumes" Dec 01 19:06:34 crc kubenswrapper[4935]: I1201 19:06:34.063366 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-a212-account-create-update-54lf7"] Dec 01 19:06:34 crc kubenswrapper[4935]: I1201 19:06:34.075427 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-a212-account-create-update-54lf7"] Dec 01 19:06:34 crc kubenswrapper[4935]: I1201 19:06:34.522945 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="763761ce-4c4b-4235-a069-8a5161e5b099" path="/var/lib/kubelet/pods/763761ce-4c4b-4235-a069-8a5161e5b099/volumes" Dec 01 19:06:35 crc kubenswrapper[4935]: I1201 19:06:35.052207 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-cwgjz"] Dec 01 19:06:35 crc kubenswrapper[4935]: I1201 19:06:35.065292 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-cwgjz"] Dec 01 19:06:36 crc kubenswrapper[4935]: I1201 19:06:36.529718 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3d074f6-a222-4e33-980c-3031ca1ad334" path="/var/lib/kubelet/pods/e3d074f6-a222-4e33-980c-3031ca1ad334/volumes" Dec 01 19:06:41 crc kubenswrapper[4935]: I1201 19:06:41.036669 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xxfm9"] Dec 01 19:06:41 crc kubenswrapper[4935]: I1201 19:06:41.046711 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xxfm9"] Dec 01 19:06:42 crc kubenswrapper[4935]: I1201 19:06:42.047663 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-hfpj6"] Dec 01 19:06:42 crc kubenswrapper[4935]: I1201 19:06:42.061631 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-hfpj6"] Dec 01 19:06:42 crc kubenswrapper[4935]: I1201 19:06:42.525329 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="035b262e-2127-4878-b0c5-fe6374f824a8" path="/var/lib/kubelet/pods/035b262e-2127-4878-b0c5-fe6374f824a8/volumes" Dec 01 19:06:42 crc kubenswrapper[4935]: I1201 19:06:42.526122 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1" path="/var/lib/kubelet/pods/c9fcb6ef-37b9-4231-b5d5-b8e02184f2e1/volumes" Dec 01 19:06:54 crc 
kubenswrapper[4935]: I1201 19:06:54.347107 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:06:54 crc kubenswrapper[4935]: I1201 19:06:54.347995 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:07:09 crc kubenswrapper[4935]: I1201 19:07:09.551486 4935 scope.go:117] "RemoveContainer" containerID="42565f5879b10e2cf817587d8b53a93c570f58e300709af98b2d483a77b817e3" Dec 01 19:07:09 crc kubenswrapper[4935]: I1201 19:07:09.592777 4935 scope.go:117] "RemoveContainer" containerID="9b45cf6363caf1545d66a95580099b8f744adba96b4dad41d42b6c531c5b9bcd" Dec 01 19:07:09 crc kubenswrapper[4935]: I1201 19:07:09.695041 4935 scope.go:117] "RemoveContainer" containerID="a3c467cfcf687b268526e6c2e5cea47d102b19abdc4539874d751d5131dd8762" Dec 01 19:07:09 crc kubenswrapper[4935]: I1201 19:07:09.737210 4935 scope.go:117] "RemoveContainer" containerID="1ee441b479e2a8d7aa95eb618469716ef34f1f1ce33464ca2a21a909519d9708" Dec 01 19:07:09 crc kubenswrapper[4935]: I1201 19:07:09.805369 4935 scope.go:117] "RemoveContainer" containerID="f82df44411d1819120b7b4916c8618b2d42060c04437159f888a29e943a92d2c" Dec 01 19:07:17 crc kubenswrapper[4935]: I1201 19:07:17.339916 4935 generic.go:334] "Generic (PLEG): container finished" podID="0048b1eb-735d-437d-b7bd-ad2814905c56" containerID="79d3d06b36e281957148bd1c6b2bbd2e0f2407e2e03f0f833bea73c31c1f4579" exitCode=0 Dec 01 19:07:17 crc kubenswrapper[4935]: I1201 19:07:17.340020 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" event={"ID":"0048b1eb-735d-437d-b7bd-ad2814905c56","Type":"ContainerDied","Data":"79d3d06b36e281957148bd1c6b2bbd2e0f2407e2e03f0f833bea73c31c1f4579"} Dec 01 19:07:18 crc kubenswrapper[4935]: I1201 19:07:18.917967 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.057075 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-inventory\") pod \"0048b1eb-735d-437d-b7bd-ad2814905c56\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.057225 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnwjl\" (UniqueName: \"kubernetes.io/projected/0048b1eb-735d-437d-b7bd-ad2814905c56-kube-api-access-nnwjl\") pod \"0048b1eb-735d-437d-b7bd-ad2814905c56\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.058312 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-ssh-key\") pod \"0048b1eb-735d-437d-b7bd-ad2814905c56\" (UID: \"0048b1eb-735d-437d-b7bd-ad2814905c56\") " Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.064978 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0048b1eb-735d-437d-b7bd-ad2814905c56-kube-api-access-nnwjl" (OuterVolumeSpecName: "kube-api-access-nnwjl") pod "0048b1eb-735d-437d-b7bd-ad2814905c56" (UID: "0048b1eb-735d-437d-b7bd-ad2814905c56"). InnerVolumeSpecName "kube-api-access-nnwjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.108869 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0048b1eb-735d-437d-b7bd-ad2814905c56" (UID: "0048b1eb-735d-437d-b7bd-ad2814905c56"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.118699 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-inventory" (OuterVolumeSpecName: "inventory") pod "0048b1eb-735d-437d-b7bd-ad2814905c56" (UID: "0048b1eb-735d-437d-b7bd-ad2814905c56"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.162033 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.162065 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0048b1eb-735d-437d-b7bd-ad2814905c56-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.162075 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnwjl\" (UniqueName: \"kubernetes.io/projected/0048b1eb-735d-437d-b7bd-ad2814905c56-kube-api-access-nnwjl\") on node \"crc\" DevicePath \"\"" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.361792 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" event={"ID":"0048b1eb-735d-437d-b7bd-ad2814905c56","Type":"ContainerDied","Data":"bd8b26afd4fd2de9be8d8e1eefd7cdbbca2f57023fd569d528d3eb540ba68aaf"} Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.361830 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd8b26afd4fd2de9be8d8e1eefd7cdbbca2f57023fd569d528d3eb540ba68aaf" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.361934 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.493695 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj"] Dec 01 19:07:19 crc kubenswrapper[4935]: E1201 19:07:19.494240 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerName="registry-server" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494257 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerName="registry-server" Dec 01 19:07:19 crc kubenswrapper[4935]: E1201 19:07:19.494295 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0048b1eb-735d-437d-b7bd-ad2814905c56" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494305 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0048b1eb-735d-437d-b7bd-ad2814905c56" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 19:07:19 crc kubenswrapper[4935]: E1201 19:07:19.494317 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerName="extract-content" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494324 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerName="extract-content" Dec 01 19:07:19 crc kubenswrapper[4935]: E1201 19:07:19.494338 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerName="extract-utilities" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494345 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerName="extract-utilities" Dec 01 19:07:19 crc kubenswrapper[4935]: E1201 19:07:19.494383 4935 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b977894a-ef33-4058-8342-119d22a7a4fd" containerName="extract-utilities" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494389 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b977894a-ef33-4058-8342-119d22a7a4fd" containerName="extract-utilities" Dec 01 19:07:19 crc kubenswrapper[4935]: E1201 19:07:19.494408 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b977894a-ef33-4058-8342-119d22a7a4fd" containerName="extract-content" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494415 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b977894a-ef33-4058-8342-119d22a7a4fd" containerName="extract-content" Dec 01 19:07:19 crc kubenswrapper[4935]: E1201 19:07:19.494424 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b977894a-ef33-4058-8342-119d22a7a4fd" containerName="registry-server" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494430 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="b977894a-ef33-4058-8342-119d22a7a4fd" containerName="registry-server" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494656 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="b977894a-ef33-4058-8342-119d22a7a4fd" containerName="registry-server" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494671 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="0048b1eb-735d-437d-b7bd-ad2814905c56" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.494682 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c29cfcd4-b473-4a8c-aeee-1a005b733524" containerName="registry-server" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.495501 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.498776 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.499021 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.499886 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.505689 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.508898 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj"] Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.675252 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.675632 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.676366 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwl52\" (UniqueName: \"kubernetes.io/projected/2fb18d0d-5377-4c16-86bb-8265ddf71223-kube-api-access-nwl52\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.778193 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwl52\" (UniqueName: \"kubernetes.io/projected/2fb18d0d-5377-4c16-86bb-8265ddf71223-kube-api-access-nwl52\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.778523 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.778607 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.782650 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.782998 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.795739 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwl52\" (UniqueName: \"kubernetes.io/projected/2fb18d0d-5377-4c16-86bb-8265ddf71223-kube-api-access-nwl52\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:19 crc kubenswrapper[4935]: I1201 19:07:19.814968 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:20 crc kubenswrapper[4935]: I1201 19:07:20.467958 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj"] Dec 01 19:07:21 crc kubenswrapper[4935]: I1201 19:07:21.386038 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" event={"ID":"2fb18d0d-5377-4c16-86bb-8265ddf71223","Type":"ContainerStarted","Data":"fe8d4384216ebe94889909d3ba143d7b3ba865fa3187f59dd86cbcfd65add7c5"} Dec 01 19:07:21 crc kubenswrapper[4935]: I1201 19:07:21.386993 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" event={"ID":"2fb18d0d-5377-4c16-86bb-8265ddf71223","Type":"ContainerStarted","Data":"30ca37ff2f05ec8c106280ad6c4b55185faebee58d1b318289a5967242756be7"} Dec 01 19:07:21 crc kubenswrapper[4935]: I1201 19:07:21.406318 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" podStartSLOduration=1.833963893 podStartE2EDuration="2.406289047s" podCreationTimestamp="2025-12-01 19:07:19 +0000 UTC" firstStartedPulling="2025-12-01 19:07:20.481353912 +0000 UTC m=+2254.502983181" lastFinishedPulling="2025-12-01 19:07:21.053679076 +0000 UTC m=+2255.075308335" observedRunningTime="2025-12-01 19:07:21.401116434 +0000 UTC m=+2255.422745783" watchObservedRunningTime="2025-12-01 19:07:21.406289047 +0000 UTC m=+2255.427918346" Dec 01 19:07:24 crc kubenswrapper[4935]: I1201 19:07:24.346855 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:07:24 crc kubenswrapper[4935]: I1201 19:07:24.349403 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:07:24 crc kubenswrapper[4935]: I1201 19:07:24.349644 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:07:24 crc kubenswrapper[4935]: I1201 19:07:24.351127 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:07:24 crc kubenswrapper[4935]: I1201 19:07:24.351514 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" gracePeriod=600 Dec 01 19:07:24 crc kubenswrapper[4935]: E1201 19:07:24.492002 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:07:25 crc kubenswrapper[4935]: I1201 19:07:25.453203 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" exitCode=0 Dec 01 19:07:25 crc kubenswrapper[4935]: I1201 19:07:25.453293 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4"} Dec 01 19:07:25 crc kubenswrapper[4935]: I1201 19:07:25.453739 4935 scope.go:117] "RemoveContainer" containerID="65f55d9794ae9f16c4bcf6f9b1370c7d8f50ea2498f356417eb8e5a61df9d9bb" Dec 01 19:07:25 crc kubenswrapper[4935]: I1201 19:07:25.457199 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:07:25 crc kubenswrapper[4935]: E1201 19:07:25.458708 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:07:27 crc kubenswrapper[4935]: I1201 19:07:27.061547 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell1-cell-mapping-z4zxp"] Dec 01 19:07:27 crc kubenswrapper[4935]: I1201 19:07:27.071579 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-z4zxp"] Dec 01 19:07:27 crc kubenswrapper[4935]: I1201 19:07:27.489560 4935 generic.go:334] "Generic (PLEG): container finished" podID="2fb18d0d-5377-4c16-86bb-8265ddf71223" containerID="fe8d4384216ebe94889909d3ba143d7b3ba865fa3187f59dd86cbcfd65add7c5" exitCode=0 Dec 01 19:07:27 crc kubenswrapper[4935]: I1201 19:07:27.489643 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" event={"ID":"2fb18d0d-5377-4c16-86bb-8265ddf71223","Type":"ContainerDied","Data":"fe8d4384216ebe94889909d3ba143d7b3ba865fa3187f59dd86cbcfd65add7c5"} Dec 01 19:07:28 crc kubenswrapper[4935]: I1201 19:07:28.542976 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6d04009-17df-4097-bc8b-4bea0e885074" path="/var/lib/kubelet/pods/a6d04009-17df-4097-bc8b-4bea0e885074/volumes" Dec 01 19:07:28 crc kubenswrapper[4935]: I1201 19:07:28.995033 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.158782 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-ssh-key\") pod \"2fb18d0d-5377-4c16-86bb-8265ddf71223\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.158901 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwl52\" (UniqueName: \"kubernetes.io/projected/2fb18d0d-5377-4c16-86bb-8265ddf71223-kube-api-access-nwl52\") pod \"2fb18d0d-5377-4c16-86bb-8265ddf71223\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.159007 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-inventory\") pod \"2fb18d0d-5377-4c16-86bb-8265ddf71223\" (UID: \"2fb18d0d-5377-4c16-86bb-8265ddf71223\") " Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.171827 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fb18d0d-5377-4c16-86bb-8265ddf71223-kube-api-access-nwl52" (OuterVolumeSpecName: "kube-api-access-nwl52") pod "2fb18d0d-5377-4c16-86bb-8265ddf71223" (UID: "2fb18d0d-5377-4c16-86bb-8265ddf71223"). InnerVolumeSpecName "kube-api-access-nwl52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.189915 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2fb18d0d-5377-4c16-86bb-8265ddf71223" (UID: "2fb18d0d-5377-4c16-86bb-8265ddf71223"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.233006 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-inventory" (OuterVolumeSpecName: "inventory") pod "2fb18d0d-5377-4c16-86bb-8265ddf71223" (UID: "2fb18d0d-5377-4c16-86bb-8265ddf71223"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.262371 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.262405 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fb18d0d-5377-4c16-86bb-8265ddf71223-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.262420 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwl52\" (UniqueName: \"kubernetes.io/projected/2fb18d0d-5377-4c16-86bb-8265ddf71223-kube-api-access-nwl52\") on node \"crc\" DevicePath \"\"" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.520209 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" event={"ID":"2fb18d0d-5377-4c16-86bb-8265ddf71223","Type":"ContainerDied","Data":"30ca37ff2f05ec8c106280ad6c4b55185faebee58d1b318289a5967242756be7"} Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.520263 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30ca37ff2f05ec8c106280ad6c4b55185faebee58d1b318289a5967242756be7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.520847 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.637470 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7"] Dec 01 19:07:29 crc kubenswrapper[4935]: E1201 19:07:29.638247 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fb18d0d-5377-4c16-86bb-8265ddf71223" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.638266 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fb18d0d-5377-4c16-86bb-8265ddf71223" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.638558 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fb18d0d-5377-4c16-86bb-8265ddf71223" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.647549 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.652562 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.652804 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.653014 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.653179 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.654052 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7"] Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.775334 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw966\" (UniqueName: \"kubernetes.io/projected/4602269a-c17c-4ef2-9484-645469b97214-kube-api-access-kw966\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gcqp7\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.775707 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gcqp7\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.775777 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gcqp7\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.877451 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw966\" (UniqueName: \"kubernetes.io/projected/4602269a-c17c-4ef2-9484-645469b97214-kube-api-access-kw966\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gcqp7\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.877588 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gcqp7\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.877610 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gcqp7\" (UID: 
\"4602269a-c17c-4ef2-9484-645469b97214\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.884551 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gcqp7\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.892665 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gcqp7\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.895444 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw966\" (UniqueName: \"kubernetes.io/projected/4602269a-c17c-4ef2-9484-645469b97214-kube-api-access-kw966\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-gcqp7\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:29 crc kubenswrapper[4935]: I1201 19:07:29.973388 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:07:30 crc kubenswrapper[4935]: I1201 19:07:30.368112 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7"] Dec 01 19:07:30 crc kubenswrapper[4935]: W1201 19:07:30.375428 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4602269a_c17c_4ef2_9484_645469b97214.slice/crio-4a30e60988b7f8a10fcbe8a8faad88501c20326a454daad69da5330b62dd06a3 WatchSource:0}: Error finding container 4a30e60988b7f8a10fcbe8a8faad88501c20326a454daad69da5330b62dd06a3: Status 404 returned error can't find the container with id 4a30e60988b7f8a10fcbe8a8faad88501c20326a454daad69da5330b62dd06a3 Dec 01 19:07:30 crc kubenswrapper[4935]: I1201 19:07:30.539474 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" event={"ID":"4602269a-c17c-4ef2-9484-645469b97214","Type":"ContainerStarted","Data":"4a30e60988b7f8a10fcbe8a8faad88501c20326a454daad69da5330b62dd06a3"} Dec 01 19:07:31 crc kubenswrapper[4935]: I1201 19:07:31.551004 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" event={"ID":"4602269a-c17c-4ef2-9484-645469b97214","Type":"ContainerStarted","Data":"9a50c8faf4363c9f48b8434495f5dad3c3ee114ebbc298ce492852623988f679"} Dec 01 19:07:31 crc kubenswrapper[4935]: I1201 19:07:31.574872 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" podStartSLOduration=1.867739796 podStartE2EDuration="2.574847358s" podCreationTimestamp="2025-12-01 19:07:29 +0000 UTC" firstStartedPulling="2025-12-01 19:07:30.378767307 +0000 UTC m=+2264.400396576" lastFinishedPulling="2025-12-01 19:07:31.085874839 +0000 UTC m=+2265.107504138" observedRunningTime="2025-12-01 19:07:31.573416553 +0000 UTC 
m=+2265.595045832" watchObservedRunningTime="2025-12-01 19:07:31.574847358 +0000 UTC m=+2265.596476637" Dec 01 19:07:36 crc kubenswrapper[4935]: E1201 19:07:36.133328 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]" Dec 01 19:07:38 crc kubenswrapper[4935]: I1201 19:07:38.510677 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:07:38 crc kubenswrapper[4935]: E1201 19:07:38.512089 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:07:50 crc kubenswrapper[4935]: I1201 19:07:50.509586 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:07:50 crc kubenswrapper[4935]: E1201 19:07:50.510617 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:08:02 crc kubenswrapper[4935]: I1201 19:08:02.508573 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:08:02 crc kubenswrapper[4935]: E1201 19:08:02.509435 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:08:10 crc kubenswrapper[4935]: I1201 19:08:10.025054 4935 scope.go:117] "RemoveContainer" containerID="84c6a92829b5dbc587e82d8c31276c2867df34c21bdb828f4d5ec3eec8fa15aa" Dec 01 19:08:14 crc kubenswrapper[4935]: I1201 19:08:14.172485 4935 generic.go:334] "Generic (PLEG): container finished" podID="4602269a-c17c-4ef2-9484-645469b97214" containerID="9a50c8faf4363c9f48b8434495f5dad3c3ee114ebbc298ce492852623988f679" exitCode=0 Dec 01 19:08:14 crc kubenswrapper[4935]: I1201 19:08:14.172614 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" event={"ID":"4602269a-c17c-4ef2-9484-645469b97214","Type":"ContainerDied","Data":"9a50c8faf4363c9f48b8434495f5dad3c3ee114ebbc298ce492852623988f679"} Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.644772 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.793035 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-inventory\") pod \"4602269a-c17c-4ef2-9484-645469b97214\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.793261 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-ssh-key\") pod \"4602269a-c17c-4ef2-9484-645469b97214\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.793415 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kw966\" (UniqueName: \"kubernetes.io/projected/4602269a-c17c-4ef2-9484-645469b97214-kube-api-access-kw966\") pod \"4602269a-c17c-4ef2-9484-645469b97214\" (UID: \"4602269a-c17c-4ef2-9484-645469b97214\") " Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.799705 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4602269a-c17c-4ef2-9484-645469b97214-kube-api-access-kw966" (OuterVolumeSpecName: "kube-api-access-kw966") pod "4602269a-c17c-4ef2-9484-645469b97214" (UID: "4602269a-c17c-4ef2-9484-645469b97214"). InnerVolumeSpecName "kube-api-access-kw966". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.826707 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4602269a-c17c-4ef2-9484-645469b97214" (UID: "4602269a-c17c-4ef2-9484-645469b97214"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.836373 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-inventory" (OuterVolumeSpecName: "inventory") pod "4602269a-c17c-4ef2-9484-645469b97214" (UID: "4602269a-c17c-4ef2-9484-645469b97214"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.902877 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.902928 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4602269a-c17c-4ef2-9484-645469b97214-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:08:15 crc kubenswrapper[4935]: I1201 19:08:15.902952 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kw966\" (UniqueName: \"kubernetes.io/projected/4602269a-c17c-4ef2-9484-645469b97214-kube-api-access-kw966\") on node \"crc\" DevicePath \"\"" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.201292 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" event={"ID":"4602269a-c17c-4ef2-9484-645469b97214","Type":"ContainerDied","Data":"4a30e60988b7f8a10fcbe8a8faad88501c20326a454daad69da5330b62dd06a3"} Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.201415 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a30e60988b7f8a10fcbe8a8faad88501c20326a454daad69da5330b62dd06a3" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.201833 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-gcqp7" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.327049 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc"] Dec 01 19:08:16 crc kubenswrapper[4935]: E1201 19:08:16.327887 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4602269a-c17c-4ef2-9484-645469b97214" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.327913 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="4602269a-c17c-4ef2-9484-645469b97214" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.328326 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="4602269a-c17c-4ef2-9484-645469b97214" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.329292 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.331642 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.331756 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.332118 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.332143 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.338186 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc"] Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.425349 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkjv8\" (UniqueName: \"kubernetes.io/projected/8de36041-d199-4faa-91f8-c5b974a39b83-kube-api-access-rkjv8\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.425425 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.425469 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.516034 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:08:16 crc kubenswrapper[4935]: E1201 19:08:16.516346 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.529861 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.529918 4935 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.530396 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkjv8\" (UniqueName: \"kubernetes.io/projected/8de36041-d199-4faa-91f8-c5b974a39b83-kube-api-access-rkjv8\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.535388 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.550580 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.555612 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkjv8\" (UniqueName: \"kubernetes.io/projected/8de36041-d199-4faa-91f8-c5b974a39b83-kube-api-access-rkjv8\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:16 crc kubenswrapper[4935]: I1201 19:08:16.666720 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:08:17 crc kubenswrapper[4935]: I1201 19:08:17.300255 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc"] Dec 01 19:08:18 crc kubenswrapper[4935]: I1201 19:08:18.223293 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" event={"ID":"8de36041-d199-4faa-91f8-c5b974a39b83","Type":"ContainerStarted","Data":"9f1b4684561880feda95d47eae3cf67ca7a8b787be7ea095c97d19937b9c8d6c"} Dec 01 19:08:18 crc kubenswrapper[4935]: I1201 19:08:18.224591 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" event={"ID":"8de36041-d199-4faa-91f8-c5b974a39b83","Type":"ContainerStarted","Data":"ff5f9aa7dd7c59537059de6b4c5bfbd15bb68ff4423f3890931153c4140f3ee9"} Dec 01 19:08:18 crc kubenswrapper[4935]: I1201 19:08:18.260422 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" podStartSLOduration=1.7984647059999999 podStartE2EDuration="2.260397172s" podCreationTimestamp="2025-12-01 19:08:16 +0000 UTC" firstStartedPulling="2025-12-01 19:08:17.305287214 +0000 UTC m=+2311.326916473" lastFinishedPulling="2025-12-01 19:08:17.76721964 +0000 UTC m=+2311.788848939" observedRunningTime="2025-12-01 19:08:18.248535337 +0000 UTC m=+2312.270164596" watchObservedRunningTime="2025-12-01 19:08:18.260397172 +0000 UTC m=+2312.282026471" Dec 01 19:08:29 crc kubenswrapper[4935]: I1201 19:08:29.508340 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:08:29 crc kubenswrapper[4935]: E1201 19:08:29.509502 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:08:41 crc kubenswrapper[4935]: I1201 19:08:41.508665 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:08:41 crc kubenswrapper[4935]: E1201 19:08:41.509471 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:08:55 crc kubenswrapper[4935]: I1201 19:08:55.509735 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:08:55 crc kubenswrapper[4935]: E1201 19:08:55.510877 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:09:07 crc kubenswrapper[4935]: I1201 19:09:07.509081 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:09:07 crc kubenswrapper[4935]: E1201 19:09:07.510058 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:09:17 crc kubenswrapper[4935]: I1201 19:09:17.005077 4935 generic.go:334] "Generic (PLEG): container finished" podID="8de36041-d199-4faa-91f8-c5b974a39b83" containerID="9f1b4684561880feda95d47eae3cf67ca7a8b787be7ea095c97d19937b9c8d6c" exitCode=0 Dec 01 19:09:17 crc kubenswrapper[4935]: I1201 19:09:17.005193 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" event={"ID":"8de36041-d199-4faa-91f8-c5b974a39b83","Type":"ContainerDied","Data":"9f1b4684561880feda95d47eae3cf67ca7a8b787be7ea095c97d19937b9c8d6c"} Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.519324 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.640614 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-inventory\") pod \"8de36041-d199-4faa-91f8-c5b974a39b83\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.640760 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkjv8\" (UniqueName: \"kubernetes.io/projected/8de36041-d199-4faa-91f8-c5b974a39b83-kube-api-access-rkjv8\") pod \"8de36041-d199-4faa-91f8-c5b974a39b83\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.641017 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-ssh-key\") pod \"8de36041-d199-4faa-91f8-c5b974a39b83\" (UID: \"8de36041-d199-4faa-91f8-c5b974a39b83\") " Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.648478 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8de36041-d199-4faa-91f8-c5b974a39b83-kube-api-access-rkjv8" (OuterVolumeSpecName: "kube-api-access-rkjv8") pod "8de36041-d199-4faa-91f8-c5b974a39b83" (UID: "8de36041-d199-4faa-91f8-c5b974a39b83"). InnerVolumeSpecName "kube-api-access-rkjv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.688231 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8de36041-d199-4faa-91f8-c5b974a39b83" (UID: "8de36041-d199-4faa-91f8-c5b974a39b83"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.704331 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-inventory" (OuterVolumeSpecName: "inventory") pod "8de36041-d199-4faa-91f8-c5b974a39b83" (UID: "8de36041-d199-4faa-91f8-c5b974a39b83"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.743900 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.743934 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8de36041-d199-4faa-91f8-c5b974a39b83-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:18 crc kubenswrapper[4935]: I1201 19:09:18.743945 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkjv8\" (UniqueName: \"kubernetes.io/projected/8de36041-d199-4faa-91f8-c5b974a39b83-kube-api-access-rkjv8\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.031507 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" event={"ID":"8de36041-d199-4faa-91f8-c5b974a39b83","Type":"ContainerDied","Data":"ff5f9aa7dd7c59537059de6b4c5bfbd15bb68ff4423f3890931153c4140f3ee9"} Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.031760 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff5f9aa7dd7c59537059de6b4c5bfbd15bb68ff4423f3890931153c4140f3ee9" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.031607 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.154497 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-h52hn"] Dec 01 19:09:19 crc kubenswrapper[4935]: E1201 19:09:19.154974 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8de36041-d199-4faa-91f8-c5b974a39b83" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.154989 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8de36041-d199-4faa-91f8-c5b974a39b83" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.155238 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="8de36041-d199-4faa-91f8-c5b974a39b83" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.155992 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.158540 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.158808 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.158919 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.158821 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.183068 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-h52hn"] Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.255730 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-h52hn\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.255798 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-h52hn\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.256105 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqzfw\" (UniqueName: \"kubernetes.io/projected/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-kube-api-access-sqzfw\") pod \"ssh-known-hosts-edpm-deployment-h52hn\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.357866 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-h52hn\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.357940 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-h52hn\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.358046 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqzfw\" (UniqueName: \"kubernetes.io/projected/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-kube-api-access-sqzfw\") pod \"ssh-known-hosts-edpm-deployment-h52hn\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc 
kubenswrapper[4935]: I1201 19:09:19.362608 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-h52hn\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.363902 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-h52hn\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.378395 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqzfw\" (UniqueName: \"kubernetes.io/projected/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-kube-api-access-sqzfw\") pod \"ssh-known-hosts-edpm-deployment-h52hn\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.470620 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:19 crc kubenswrapper[4935]: I1201 19:09:19.508514 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:09:19 crc kubenswrapper[4935]: E1201 19:09:19.509049 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:09:20 crc kubenswrapper[4935]: I1201 19:09:20.072800 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-h52hn"] Dec 01 19:09:20 crc kubenswrapper[4935]: W1201 19:09:20.080429 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b9b889f_4f7c_49c9_9bfa_cb42c62d0fa7.slice/crio-ebada6d5da5f416b7b980149368016c00378f18e7bf7de3983a6246323279fe7 WatchSource:0}: Error finding container ebada6d5da5f416b7b980149368016c00378f18e7bf7de3983a6246323279fe7: Status 404 returned error can't find the container with id ebada6d5da5f416b7b980149368016c00378f18e7bf7de3983a6246323279fe7 Dec 01 19:09:20 crc kubenswrapper[4935]: I1201 19:09:20.083550 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:09:21 crc kubenswrapper[4935]: I1201 19:09:21.054323 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" event={"ID":"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7","Type":"ContainerStarted","Data":"06083e5e0bdd8b21128f4c0ee867f4772ecafb53481c53f2f27168632aeaedc5"} Dec 01 19:09:21 crc kubenswrapper[4935]: I1201 19:09:21.054587 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" 
event={"ID":"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7","Type":"ContainerStarted","Data":"ebada6d5da5f416b7b980149368016c00378f18e7bf7de3983a6246323279fe7"} Dec 01 19:09:21 crc kubenswrapper[4935]: I1201 19:09:21.075332 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" podStartSLOduration=1.513949968 podStartE2EDuration="2.07531037s" podCreationTimestamp="2025-12-01 19:09:19 +0000 UTC" firstStartedPulling="2025-12-01 19:09:20.08331833 +0000 UTC m=+2374.104947589" lastFinishedPulling="2025-12-01 19:09:20.644678712 +0000 UTC m=+2374.666307991" observedRunningTime="2025-12-01 19:09:21.073047249 +0000 UTC m=+2375.094676528" watchObservedRunningTime="2025-12-01 19:09:21.07531037 +0000 UTC m=+2375.096939629" Dec 01 19:09:22 crc kubenswrapper[4935]: I1201 19:09:22.069298 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-6rt65"] Dec 01 19:09:22 crc kubenswrapper[4935]: I1201 19:09:22.079126 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-6rt65"] Dec 01 19:09:22 crc kubenswrapper[4935]: I1201 19:09:22.526873 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25a03a1c-18c2-4f1d-96bf-cbb5bdae3749" path="/var/lib/kubelet/pods/25a03a1c-18c2-4f1d-96bf-cbb5bdae3749/volumes" Dec 01 19:09:29 crc kubenswrapper[4935]: I1201 19:09:29.144714 4935 generic.go:334] "Generic (PLEG): container finished" podID="1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7" containerID="06083e5e0bdd8b21128f4c0ee867f4772ecafb53481c53f2f27168632aeaedc5" exitCode=0 Dec 01 19:09:29 crc kubenswrapper[4935]: I1201 19:09:29.144801 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" event={"ID":"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7","Type":"ContainerDied","Data":"06083e5e0bdd8b21128f4c0ee867f4772ecafb53481c53f2f27168632aeaedc5"} Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.510614 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:09:30 crc kubenswrapper[4935]: E1201 19:09:30.511448 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.712646 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.773495 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-inventory-0\") pod \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.773766 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqzfw\" (UniqueName: \"kubernetes.io/projected/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-kube-api-access-sqzfw\") pod \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.773882 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-ssh-key-openstack-edpm-ipam\") pod \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\" (UID: \"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7\") " Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.779134 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-kube-api-access-sqzfw" (OuterVolumeSpecName: "kube-api-access-sqzfw") pod "1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7" (UID: "1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7"). InnerVolumeSpecName "kube-api-access-sqzfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.805539 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7" (UID: "1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.827771 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7" (UID: "1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.876967 4935 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.877003 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqzfw\" (UniqueName: \"kubernetes.io/projected/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-kube-api-access-sqzfw\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:30 crc kubenswrapper[4935]: I1201 19:09:30.877014 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.174531 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" event={"ID":"1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7","Type":"ContainerDied","Data":"ebada6d5da5f416b7b980149368016c00378f18e7bf7de3983a6246323279fe7"} Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.174567 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebada6d5da5f416b7b980149368016c00378f18e7bf7de3983a6246323279fe7" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.174627 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-h52hn" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.263042 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq"] Dec 01 19:09:31 crc kubenswrapper[4935]: E1201 19:09:31.264054 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7" containerName="ssh-known-hosts-edpm-deployment" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.264076 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7" containerName="ssh-known-hosts-edpm-deployment" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.264435 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7" containerName="ssh-known-hosts-edpm-deployment" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.265491 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.267918 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.268233 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.272003 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.272064 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.273214 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq"] Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.296577 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6spvq\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.297370 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjxpt\" (UniqueName: \"kubernetes.io/projected/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-kube-api-access-sjxpt\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6spvq\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.297556 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6spvq\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.400246 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6spvq\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.400354 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjxpt\" (UniqueName: \"kubernetes.io/projected/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-kube-api-access-sjxpt\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6spvq\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.400452 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6spvq\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.405311 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6spvq\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.408099 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6spvq\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.430855 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjxpt\" (UniqueName: \"kubernetes.io/projected/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-kube-api-access-sjxpt\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6spvq\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:31 crc kubenswrapper[4935]: I1201 19:09:31.585628 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:32 crc kubenswrapper[4935]: W1201 19:09:32.231659 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc13ec0c0_24c3_463c_b7c3_fcbf4bfbacd9.slice/crio-738833d4bbf490edc10847c947728da41fa99d2ad5e7a4cb8e2eb3cdbfdc32c8 WatchSource:0}: Error finding container 738833d4bbf490edc10847c947728da41fa99d2ad5e7a4cb8e2eb3cdbfdc32c8: Status 404 returned error can't find the container with id 738833d4bbf490edc10847c947728da41fa99d2ad5e7a4cb8e2eb3cdbfdc32c8 Dec 01 19:09:32 crc kubenswrapper[4935]: I1201 19:09:32.245872 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq"] Dec 01 19:09:33 crc kubenswrapper[4935]: I1201 19:09:33.200435 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" event={"ID":"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9","Type":"ContainerStarted","Data":"c329236e06be7cd18fedb4e4296e617ad109e0c39ff950d78097f9a8b466a38a"} Dec 01 19:09:33 crc kubenswrapper[4935]: I1201 19:09:33.201054 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" event={"ID":"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9","Type":"ContainerStarted","Data":"738833d4bbf490edc10847c947728da41fa99d2ad5e7a4cb8e2eb3cdbfdc32c8"} Dec 01 19:09:33 crc kubenswrapper[4935]: I1201 19:09:33.223545 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" podStartSLOduration=1.681449995 podStartE2EDuration="2.223520707s" podCreationTimestamp="2025-12-01 19:09:31 +0000 UTC" firstStartedPulling="2025-12-01 19:09:32.235862534 +0000 UTC m=+2386.257491833" lastFinishedPulling="2025-12-01 19:09:32.777933286 +0000 UTC m=+2386.799562545" observedRunningTime="2025-12-01 19:09:33.219247592 +0000 UTC m=+2387.240876851" watchObservedRunningTime="2025-12-01 19:09:33.223520707 +0000 UTC 
m=+2387.245149986" Dec 01 19:09:42 crc kubenswrapper[4935]: I1201 19:09:42.355342 4935 generic.go:334] "Generic (PLEG): container finished" podID="c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9" containerID="c329236e06be7cd18fedb4e4296e617ad109e0c39ff950d78097f9a8b466a38a" exitCode=0 Dec 01 19:09:42 crc kubenswrapper[4935]: I1201 19:09:42.355450 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" event={"ID":"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9","Type":"ContainerDied","Data":"c329236e06be7cd18fedb4e4296e617ad109e0c39ff950d78097f9a8b466a38a"} Dec 01 19:09:43 crc kubenswrapper[4935]: I1201 19:09:43.921835 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.030975 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjxpt\" (UniqueName: \"kubernetes.io/projected/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-kube-api-access-sjxpt\") pod \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.031219 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-ssh-key\") pod \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.031424 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-inventory\") pod \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\" (UID: \"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9\") " Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.043671 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-kube-api-access-sjxpt" (OuterVolumeSpecName: "kube-api-access-sjxpt") pod "c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9" (UID: "c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9"). InnerVolumeSpecName "kube-api-access-sjxpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.064532 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-inventory" (OuterVolumeSpecName: "inventory") pod "c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9" (UID: "c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.067284 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9" (UID: "c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.134079 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjxpt\" (UniqueName: \"kubernetes.io/projected/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-kube-api-access-sjxpt\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.134115 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.134124 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.382936 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" event={"ID":"c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9","Type":"ContainerDied","Data":"738833d4bbf490edc10847c947728da41fa99d2ad5e7a4cb8e2eb3cdbfdc32c8"} Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.383515 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="738833d4bbf490edc10847c947728da41fa99d2ad5e7a4cb8e2eb3cdbfdc32c8" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.383052 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6spvq" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.509250 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:09:44 crc kubenswrapper[4935]: E1201 19:09:44.510041 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.535878 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9"] Dec 01 19:09:44 crc kubenswrapper[4935]: E1201 19:09:44.536810 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.536847 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.537494 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.539080 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9"] Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.539253 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.542490 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.542612 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.542888 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.544379 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.650657 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-286n9\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.650732 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lr69\" (UniqueName: \"kubernetes.io/projected/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-kube-api-access-6lr69\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-286n9\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.650963 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-286n9\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.752929 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-286n9\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.753023 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-286n9\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.753071 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lr69\" (UniqueName: \"kubernetes.io/projected/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-kube-api-access-6lr69\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-286n9\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.760019 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-286n9\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.765925 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-286n9\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.775599 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lr69\" (UniqueName: \"kubernetes.io/projected/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-kube-api-access-6lr69\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-286n9\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:44 crc kubenswrapper[4935]: I1201 19:09:44.874946 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:45 crc kubenswrapper[4935]: I1201 19:09:45.493728 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9"] Dec 01 19:09:46 crc kubenswrapper[4935]: I1201 19:09:46.425494 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" event={"ID":"8c994dd9-89ff-43a4-b424-65b8a2bfb67d","Type":"ContainerStarted","Data":"249e5f07d0bebf57b4afb3f211d96a18b276d73328839b7cca7a82f34419c5e9"} Dec 01 19:09:47 crc kubenswrapper[4935]: I1201 19:09:47.439078 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" event={"ID":"8c994dd9-89ff-43a4-b424-65b8a2bfb67d","Type":"ContainerStarted","Data":"eee79978884d542bc5847d2bc9b2c14ce81ca86fb73af2cd900ae7bd97dab0c0"} Dec 01 19:09:47 crc kubenswrapper[4935]: I1201 19:09:47.481892 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" podStartSLOduration=2.608314188 podStartE2EDuration="3.481867883s" podCreationTimestamp="2025-12-01 19:09:44 +0000 UTC" firstStartedPulling="2025-12-01 19:09:45.515941854 +0000 UTC m=+2399.537571123" lastFinishedPulling="2025-12-01 19:09:46.389495559 +0000 UTC m=+2400.411124818" observedRunningTime="2025-12-01 19:09:47.466681053 +0000 UTC m=+2401.488310352" watchObservedRunningTime="2025-12-01 19:09:47.481867883 +0000 UTC m=+2401.503497172" Dec 01 19:09:56 crc kubenswrapper[4935]: I1201 19:09:56.635435 4935 generic.go:334] "Generic (PLEG): container finished" podID="8c994dd9-89ff-43a4-b424-65b8a2bfb67d" containerID="eee79978884d542bc5847d2bc9b2c14ce81ca86fb73af2cd900ae7bd97dab0c0" exitCode=0 Dec 01 19:09:56 crc kubenswrapper[4935]: I1201 19:09:56.635598 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" event={"ID":"8c994dd9-89ff-43a4-b424-65b8a2bfb67d","Type":"ContainerDied","Data":"eee79978884d542bc5847d2bc9b2c14ce81ca86fb73af2cd900ae7bd97dab0c0"} Dec 01 19:09:57 crc kubenswrapper[4935]: I1201 19:09:57.508933 4935 
scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:09:57 crc kubenswrapper[4935]: E1201 19:09:57.509822 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.271640 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.349027 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-ssh-key\") pod \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.349451 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-inventory\") pod \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.349603 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lr69\" (UniqueName: \"kubernetes.io/projected/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-kube-api-access-6lr69\") pod \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\" (UID: \"8c994dd9-89ff-43a4-b424-65b8a2bfb67d\") " Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.363435 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-kube-api-access-6lr69" (OuterVolumeSpecName: "kube-api-access-6lr69") pod "8c994dd9-89ff-43a4-b424-65b8a2bfb67d" (UID: "8c994dd9-89ff-43a4-b424-65b8a2bfb67d"). InnerVolumeSpecName "kube-api-access-6lr69". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.413748 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-inventory" (OuterVolumeSpecName: "inventory") pod "8c994dd9-89ff-43a4-b424-65b8a2bfb67d" (UID: "8c994dd9-89ff-43a4-b424-65b8a2bfb67d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.438514 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8c994dd9-89ff-43a4-b424-65b8a2bfb67d" (UID: "8c994dd9-89ff-43a4-b424-65b8a2bfb67d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.454102 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lr69\" (UniqueName: \"kubernetes.io/projected/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-kube-api-access-6lr69\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.454132 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.454156 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c994dd9-89ff-43a4-b424-65b8a2bfb67d-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.667532 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" event={"ID":"8c994dd9-89ff-43a4-b424-65b8a2bfb67d","Type":"ContainerDied","Data":"249e5f07d0bebf57b4afb3f211d96a18b276d73328839b7cca7a82f34419c5e9"} Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.667592 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="249e5f07d0bebf57b4afb3f211d96a18b276d73328839b7cca7a82f34419c5e9" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.667643 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-286n9" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.742051 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc"] Dec 01 19:09:58 crc kubenswrapper[4935]: E1201 19:09:58.742505 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c994dd9-89ff-43a4-b424-65b8a2bfb67d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.742523 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c994dd9-89ff-43a4-b424-65b8a2bfb67d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.742747 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c994dd9-89ff-43a4-b424-65b8a2bfb67d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.743550 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.747008 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.747524 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.747663 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.749048 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.749463 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.749503 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.749951 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.749961 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.750331 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.769776 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc"] Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.861974 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.862351 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.862475 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 
19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.862515 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.862643 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.862740 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.862793 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.862876 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bxrl\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-kube-api-access-7bxrl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.863088 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.863223 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-power-monitoring-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.863262 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.863307 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.863425 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.863463 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.863568 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.863620 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966064 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966130 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ssh-key\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966273 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966317 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966374 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966406 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966445 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966472 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966536 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966564 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966595 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966630 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bxrl\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-kube-api-access-7bxrl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966712 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966776 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-power-monitoring-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966823 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.966900 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.972065 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.972538 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.972604 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.972710 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.972868 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.973286 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.973853 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.974126 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-power-monitoring-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.974989 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.975624 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.976482 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.976515 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.977382 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.980011 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.981509 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:58 crc kubenswrapper[4935]: I1201 19:09:58.983949 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7bxrl\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-kube-api-access-7bxrl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-btfmc\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:59 crc kubenswrapper[4935]: I1201 19:09:59.062870 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:09:59 crc kubenswrapper[4935]: W1201 19:09:59.504922 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod378c4e86_bba0_4b39_8708_c925caf2756b.slice/crio-2549beaf9afdf91b9f13ed31f3731ea08652503186e92da9cc5fe45f9a993d27 WatchSource:0}: Error finding container 2549beaf9afdf91b9f13ed31f3731ea08652503186e92da9cc5fe45f9a993d27: Status 404 returned error can't find the container with id 2549beaf9afdf91b9f13ed31f3731ea08652503186e92da9cc5fe45f9a993d27 Dec 01 19:09:59 crc kubenswrapper[4935]: I1201 19:09:59.523129 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc"] Dec 01 19:09:59 crc kubenswrapper[4935]: I1201 19:09:59.685386 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" event={"ID":"378c4e86-bba0-4b39-8708-c925caf2756b","Type":"ContainerStarted","Data":"2549beaf9afdf91b9f13ed31f3731ea08652503186e92da9cc5fe45f9a993d27"} Dec 01 19:10:00 crc kubenswrapper[4935]: I1201 19:10:00.706523 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" event={"ID":"378c4e86-bba0-4b39-8708-c925caf2756b","Type":"ContainerStarted","Data":"654799bd7ac33af2bd425daadf03b67c0c3445b2fd8dbd5a3bd8c042b1ba1d0e"} Dec 01 19:10:00 crc kubenswrapper[4935]: I1201 19:10:00.734992 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" podStartSLOduration=2.050724141 podStartE2EDuration="2.73497558s" podCreationTimestamp="2025-12-01 19:09:58 +0000 UTC" firstStartedPulling="2025-12-01 19:09:59.507884155 +0000 UTC m=+2413.529513404" lastFinishedPulling="2025-12-01 19:10:00.192135574 +0000 UTC m=+2414.213764843" observedRunningTime="2025-12-01 19:10:00.729377393 +0000 UTC m=+2414.751006652" watchObservedRunningTime="2025-12-01 19:10:00.73497558 +0000 UTC m=+2414.756604839" Dec 01 19:10:01 crc kubenswrapper[4935]: I1201 19:10:01.066841 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-qm9b2"] Dec 01 19:10:01 crc kubenswrapper[4935]: I1201 19:10:01.084311 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-qm9b2"] Dec 01 19:10:02 crc kubenswrapper[4935]: I1201 19:10:02.519881 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a02039c1-bb22-4f93-99ff-e9d4bead1b9c" path="/var/lib/kubelet/pods/a02039c1-bb22-4f93-99ff-e9d4bead1b9c/volumes" Dec 01 19:10:08 crc kubenswrapper[4935]: I1201 19:10:08.509021 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:10:08 crc kubenswrapper[4935]: E1201 19:10:08.510256 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:10:10 crc kubenswrapper[4935]: I1201 19:10:10.153269 4935 scope.go:117] "RemoveContainer" containerID="f97c8d7bf3e01b4a6e801943fb78ce5c8aa71110c35ad516cef44d8c11b52b40" Dec 01 19:10:10 crc kubenswrapper[4935]: I1201 19:10:10.210464 4935 scope.go:117] "RemoveContainer" containerID="08aab24b030bba234bd9802bbe926ae9c03d99818aa74748d8eff364d6319485" Dec 01 19:10:22 crc kubenswrapper[4935]: I1201 19:10:22.509164 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:10:22 crc kubenswrapper[4935]: E1201 19:10:22.510072 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:10:36 crc kubenswrapper[4935]: I1201 19:10:36.545298 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:10:36 crc kubenswrapper[4935]: E1201 19:10:36.546807 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:10:49 crc kubenswrapper[4935]: I1201 19:10:49.511303 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:10:49 crc kubenswrapper[4935]: E1201 19:10:49.512591 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:10:54 crc kubenswrapper[4935]: I1201 19:10:54.472702 4935 generic.go:334] "Generic (PLEG): container finished" podID="378c4e86-bba0-4b39-8708-c925caf2756b" containerID="654799bd7ac33af2bd425daadf03b67c0c3445b2fd8dbd5a3bd8c042b1ba1d0e" exitCode=0 Dec 01 19:10:54 crc kubenswrapper[4935]: I1201 19:10:54.472822 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" event={"ID":"378c4e86-bba0-4b39-8708-c925caf2756b","Type":"ContainerDied","Data":"654799bd7ac33af2bd425daadf03b67c0c3445b2fd8dbd5a3bd8c042b1ba1d0e"} Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.019992 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131199 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-inventory\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131246 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-combined-ca-bundle\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131279 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-bootstrap-combined-ca-bundle\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131369 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-neutron-metadata-combined-ca-bundle\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131423 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-libvirt-combined-ca-bundle\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131474 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-nova-combined-ca-bundle\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131584 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-power-monitoring-combined-ca-bundle\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131646 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131721 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bxrl\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-kube-api-access-7bxrl\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 
19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131778 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ovn-combined-ca-bundle\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131844 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.131982 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-ovn-default-certs-0\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.132081 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ssh-key\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.132172 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-repo-setup-combined-ca-bundle\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.132260 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.132300 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"378c4e86-bba0-4b39-8708-c925caf2756b\" (UID: \"378c4e86-bba0-4b39-8708-c925caf2756b\") " Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.139718 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-kube-api-access-7bxrl" (OuterVolumeSpecName: "kube-api-access-7bxrl") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "kube-api-access-7bxrl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.140634 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.140725 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.140779 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.141286 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.141291 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.141384 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.141631 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-power-monitoring-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-power-monitoring-combined-ca-bundle") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "telemetry-power-monitoring-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.142843 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.142867 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.142964 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.144458 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.145888 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.147671 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.169840 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-inventory" (OuterVolumeSpecName: "inventory") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.180652 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "378c4e86-bba0-4b39-8708-c925caf2756b" (UID: "378c4e86-bba0-4b39-8708-c925caf2756b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242730 4935 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242777 4935 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242796 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242811 4935 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242827 4935 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242839 4935 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242852 4935 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242864 4935 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242877 4935 reconciler_common.go:293] "Volume detached for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-telemetry-power-monitoring-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242891 4935 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc 
kubenswrapper[4935]: I1201 19:10:56.242904 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bxrl\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-kube-api-access-7bxrl\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242916 4935 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242929 4935 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-telemetry-power-monitoring-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242946 4935 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/378c4e86-bba0-4b39-8708-c925caf2756b-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242960 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.242971 4935 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/378c4e86-bba0-4b39-8708-c925caf2756b-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.503244 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" event={"ID":"378c4e86-bba0-4b39-8708-c925caf2756b","Type":"ContainerDied","Data":"2549beaf9afdf91b9f13ed31f3731ea08652503186e92da9cc5fe45f9a993d27"} Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.503289 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2549beaf9afdf91b9f13ed31f3731ea08652503186e92da9cc5fe45f9a993d27" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.503323 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-btfmc" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.732422 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5"] Dec 01 19:10:56 crc kubenswrapper[4935]: E1201 19:10:56.736558 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378c4e86-bba0-4b39-8708-c925caf2756b" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.736590 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="378c4e86-bba0-4b39-8708-c925caf2756b" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.736935 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="378c4e86-bba0-4b39-8708-c925caf2756b" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.737713 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.742524 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.742703 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.742848 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.743028 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.743206 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.760432 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5"] Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.856517 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c0020572-52c6-4df3-8074-935cd16a074e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.856593 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.856633 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.856675 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.856751 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqbc7\" (UniqueName: \"kubernetes.io/projected/c0020572-52c6-4df3-8074-935cd16a074e-kube-api-access-cqbc7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.958617 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/c0020572-52c6-4df3-8074-935cd16a074e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.958712 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.958755 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.958806 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.958894 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqbc7\" (UniqueName: \"kubernetes.io/projected/c0020572-52c6-4df3-8074-935cd16a074e-kube-api-access-cqbc7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.959472 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c0020572-52c6-4df3-8074-935cd16a074e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.963028 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.964742 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.964984 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:56 crc kubenswrapper[4935]: I1201 19:10:56.981448 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqbc7\" (UniqueName: \"kubernetes.io/projected/c0020572-52c6-4df3-8074-935cd16a074e-kube-api-access-cqbc7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ftdr5\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:57 crc kubenswrapper[4935]: I1201 19:10:57.068297 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:10:58 crc kubenswrapper[4935]: I1201 19:10:58.254371 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5"] Dec 01 19:10:58 crc kubenswrapper[4935]: I1201 19:10:58.541733 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" event={"ID":"c0020572-52c6-4df3-8074-935cd16a074e","Type":"ContainerStarted","Data":"7da620bcb40700069bda096a1c4711dd8401cf2478f37038a50cb531eaa33a57"} Dec 01 19:10:59 crc kubenswrapper[4935]: I1201 19:10:59.556675 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" event={"ID":"c0020572-52c6-4df3-8074-935cd16a074e","Type":"ContainerStarted","Data":"3b3759325a6c3db8a7606bc81447bc8f69becde272f68269545727789760ac06"} Dec 01 19:10:59 crc kubenswrapper[4935]: I1201 19:10:59.584451 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" podStartSLOduration=2.903915845 podStartE2EDuration="3.584435026s" podCreationTimestamp="2025-12-01 19:10:56 +0000 UTC" firstStartedPulling="2025-12-01 19:10:58.262418109 +0000 UTC m=+2472.284047368" lastFinishedPulling="2025-12-01 19:10:58.94293729 +0000 UTC m=+2472.964566549" observedRunningTime="2025-12-01 19:10:59.577180807 +0000 UTC m=+2473.598810076" watchObservedRunningTime="2025-12-01 19:10:59.584435026 +0000 UTC m=+2473.606064285" Dec 01 19:11:02 crc kubenswrapper[4935]: I1201 19:11:02.508680 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:11:02 crc kubenswrapper[4935]: E1201 19:11:02.510058 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:11:14 crc kubenswrapper[4935]: I1201 19:11:14.509380 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:11:14 crc kubenswrapper[4935]: E1201 19:11:14.510616 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:11:28 crc 
kubenswrapper[4935]: I1201 19:11:28.524022 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:11:28 crc kubenswrapper[4935]: E1201 19:11:28.525688 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:11:43 crc kubenswrapper[4935]: I1201 19:11:43.509194 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:11:43 crc kubenswrapper[4935]: E1201 19:11:43.510555 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.521762 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tflnw"] Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.528264 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.552403 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tflnw"] Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.681614 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb65r\" (UniqueName: \"kubernetes.io/projected/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-kube-api-access-nb65r\") pod \"certified-operators-tflnw\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.682775 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-utilities\") pod \"certified-operators-tflnw\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.682945 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-catalog-content\") pod \"certified-operators-tflnw\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.785631 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-catalog-content\") pod \"certified-operators-tflnw\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 
19:11:53.785924 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb65r\" (UniqueName: \"kubernetes.io/projected/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-kube-api-access-nb65r\") pod \"certified-operators-tflnw\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.785989 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-utilities\") pod \"certified-operators-tflnw\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.786208 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-catalog-content\") pod \"certified-operators-tflnw\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.786492 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-utilities\") pod \"certified-operators-tflnw\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.812131 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb65r\" (UniqueName: \"kubernetes.io/projected/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-kube-api-access-nb65r\") pod \"certified-operators-tflnw\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:53 crc kubenswrapper[4935]: I1201 19:11:53.873810 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:11:54 crc kubenswrapper[4935]: I1201 19:11:54.394934 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tflnw"] Dec 01 19:11:55 crc kubenswrapper[4935]: I1201 19:11:55.275394 4935 generic.go:334] "Generic (PLEG): container finished" podID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerID="1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050" exitCode=0 Dec 01 19:11:55 crc kubenswrapper[4935]: I1201 19:11:55.275517 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tflnw" event={"ID":"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8","Type":"ContainerDied","Data":"1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050"} Dec 01 19:11:55 crc kubenswrapper[4935]: I1201 19:11:55.276973 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tflnw" event={"ID":"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8","Type":"ContainerStarted","Data":"7943039e6eac996ad4bdc223bb642daffb77b5a5be9f31370f6db3fc36b0f926"} Dec 01 19:11:57 crc kubenswrapper[4935]: I1201 19:11:57.299576 4935 generic.go:334] "Generic (PLEG): container finished" podID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerID="edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607" exitCode=0 Dec 01 19:11:57 crc kubenswrapper[4935]: I1201 19:11:57.299627 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tflnw" event={"ID":"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8","Type":"ContainerDied","Data":"edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607"} Dec 01 19:11:58 crc kubenswrapper[4935]: I1201 19:11:58.508113 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:11:58 crc kubenswrapper[4935]: E1201 19:11:58.509136 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:11:59 crc kubenswrapper[4935]: I1201 19:11:59.330224 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tflnw" event={"ID":"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8","Type":"ContainerStarted","Data":"1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae"} Dec 01 19:11:59 crc kubenswrapper[4935]: I1201 19:11:59.355527 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tflnw" podStartSLOduration=3.534773145 podStartE2EDuration="6.355509056s" podCreationTimestamp="2025-12-01 19:11:53 +0000 UTC" firstStartedPulling="2025-12-01 19:11:55.277671341 +0000 UTC m=+2529.299300590" lastFinishedPulling="2025-12-01 19:11:58.098407222 +0000 UTC m=+2532.120036501" observedRunningTime="2025-12-01 19:11:59.353828163 +0000 UTC m=+2533.375457432" watchObservedRunningTime="2025-12-01 19:11:59.355509056 +0000 UTC m=+2533.377138315" Dec 01 19:12:03 crc kubenswrapper[4935]: I1201 19:12:03.875026 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tflnw" Dec 01 
19:12:03 crc kubenswrapper[4935]: I1201 19:12:03.876021 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:12:03 crc kubenswrapper[4935]: I1201 19:12:03.947951 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:12:04 crc kubenswrapper[4935]: I1201 19:12:04.469993 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:12:04 crc kubenswrapper[4935]: I1201 19:12:04.552432 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tflnw"] Dec 01 19:12:06 crc kubenswrapper[4935]: I1201 19:12:06.431084 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tflnw" podUID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerName="registry-server" containerID="cri-o://1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae" gracePeriod=2 Dec 01 19:12:06 crc kubenswrapper[4935]: I1201 19:12:06.984294 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.035748 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-utilities\") pod \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.036215 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nb65r\" (UniqueName: \"kubernetes.io/projected/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-kube-api-access-nb65r\") pod \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.036305 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-catalog-content\") pod \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\" (UID: \"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8\") " Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.037476 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-utilities" (OuterVolumeSpecName: "utilities") pod "9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" (UID: "9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.042590 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-kube-api-access-nb65r" (OuterVolumeSpecName: "kube-api-access-nb65r") pod "9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" (UID: "9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8"). InnerVolumeSpecName "kube-api-access-nb65r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.125187 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" (UID: "9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.140915 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.140986 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nb65r\" (UniqueName: \"kubernetes.io/projected/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-kube-api-access-nb65r\") on node \"crc\" DevicePath \"\"" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.141008 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.454922 4935 generic.go:334] "Generic (PLEG): container finished" podID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerID="1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae" exitCode=0 Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.454991 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tflnw" event={"ID":"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8","Type":"ContainerDied","Data":"1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae"} Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.455032 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tflnw" event={"ID":"9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8","Type":"ContainerDied","Data":"7943039e6eac996ad4bdc223bb642daffb77b5a5be9f31370f6db3fc36b0f926"} Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.455061 4935 scope.go:117] "RemoveContainer" containerID="1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.455239 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tflnw" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.500236 4935 scope.go:117] "RemoveContainer" containerID="edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.528307 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tflnw"] Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.545588 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tflnw"] Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.562245 4935 scope.go:117] "RemoveContainer" containerID="1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.609953 4935 scope.go:117] "RemoveContainer" containerID="1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae" Dec 01 19:12:07 crc kubenswrapper[4935]: E1201 19:12:07.611068 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae\": container with ID starting with 1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae not found: ID does not exist" containerID="1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.611126 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae"} err="failed to get container status \"1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae\": rpc error: code = NotFound desc = could not find container \"1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae\": container with ID starting with 1cb9f207c3df007972ccf87d09f26e9f3c15d2ae8210e03a6d102bb73d0631ae not found: ID does not exist" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.611178 4935 scope.go:117] "RemoveContainer" containerID="edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607" Dec 01 19:12:07 crc kubenswrapper[4935]: E1201 19:12:07.611697 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607\": container with ID starting with edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607 not found: ID does not exist" containerID="edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.611752 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607"} err="failed to get container status \"edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607\": rpc error: code = NotFound desc = could not find container \"edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607\": container with ID starting with edfd43cdac8068a6f7c3f3a5cace0d8c8d28ea36c0642ce072f576801c2ed607 not found: ID does not exist" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.611788 4935 scope.go:117] "RemoveContainer" containerID="1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050" Dec 01 19:12:07 crc kubenswrapper[4935]: E1201 19:12:07.612216 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050\": container with ID starting with 1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050 not found: ID does not exist" containerID="1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050" Dec 01 19:12:07 crc kubenswrapper[4935]: I1201 19:12:07.612301 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050"} err="failed to get container status \"1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050\": rpc error: code = NotFound desc = could not find container \"1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050\": container with ID starting with 1a4267cc8a7887cddf0de4724c554c385f12bb8a7b8fa649dbc5e9166fb7f050 not found: ID does not exist" Dec 01 19:12:08 crc kubenswrapper[4935]: I1201 19:12:08.525500 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" path="/var/lib/kubelet/pods/9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8/volumes" Dec 01 19:12:09 crc kubenswrapper[4935]: E1201 19:12:09.948028 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0020572_52c6_4df3_8074_935cd16a074e.slice/crio-3b3759325a6c3db8a7606bc81447bc8f69becde272f68269545727789760ac06.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:12:10 crc kubenswrapper[4935]: I1201 19:12:10.532304 4935 generic.go:334] "Generic (PLEG): container finished" podID="c0020572-52c6-4df3-8074-935cd16a074e" containerID="3b3759325a6c3db8a7606bc81447bc8f69becde272f68269545727789760ac06" exitCode=0 Dec 01 19:12:10 crc kubenswrapper[4935]: I1201 19:12:10.532675 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" event={"ID":"c0020572-52c6-4df3-8074-935cd16a074e","Type":"ContainerDied","Data":"3b3759325a6c3db8a7606bc81447bc8f69becde272f68269545727789760ac06"} Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.116213 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.178969 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ovn-combined-ca-bundle\") pod \"c0020572-52c6-4df3-8074-935cd16a074e\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.179171 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ssh-key\") pod \"c0020572-52c6-4df3-8074-935cd16a074e\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.179213 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c0020572-52c6-4df3-8074-935cd16a074e-ovncontroller-config-0\") pod \"c0020572-52c6-4df3-8074-935cd16a074e\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.179300 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqbc7\" (UniqueName: \"kubernetes.io/projected/c0020572-52c6-4df3-8074-935cd16a074e-kube-api-access-cqbc7\") pod \"c0020572-52c6-4df3-8074-935cd16a074e\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.179500 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-inventory\") pod \"c0020572-52c6-4df3-8074-935cd16a074e\" (UID: \"c0020572-52c6-4df3-8074-935cd16a074e\") " Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.194454 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c0020572-52c6-4df3-8074-935cd16a074e" (UID: "c0020572-52c6-4df3-8074-935cd16a074e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.200483 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0020572-52c6-4df3-8074-935cd16a074e-kube-api-access-cqbc7" (OuterVolumeSpecName: "kube-api-access-cqbc7") pod "c0020572-52c6-4df3-8074-935cd16a074e" (UID: "c0020572-52c6-4df3-8074-935cd16a074e"). InnerVolumeSpecName "kube-api-access-cqbc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.258402 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0020572-52c6-4df3-8074-935cd16a074e-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "c0020572-52c6-4df3-8074-935cd16a074e" (UID: "c0020572-52c6-4df3-8074-935cd16a074e"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.284117 4935 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.284409 4935 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c0020572-52c6-4df3-8074-935cd16a074e-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.284485 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqbc7\" (UniqueName: \"kubernetes.io/projected/c0020572-52c6-4df3-8074-935cd16a074e-kube-api-access-cqbc7\") on node \"crc\" DevicePath \"\"" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.286921 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-inventory" (OuterVolumeSpecName: "inventory") pod "c0020572-52c6-4df3-8074-935cd16a074e" (UID: "c0020572-52c6-4df3-8074-935cd16a074e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.297240 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c0020572-52c6-4df3-8074-935cd16a074e" (UID: "c0020572-52c6-4df3-8074-935cd16a074e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.386690 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.386722 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0020572-52c6-4df3-8074-935cd16a074e-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.508842 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:12:12 crc kubenswrapper[4935]: E1201 19:12:12.509315 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.557256 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" event={"ID":"c0020572-52c6-4df3-8074-935cd16a074e","Type":"ContainerDied","Data":"7da620bcb40700069bda096a1c4711dd8401cf2478f37038a50cb531eaa33a57"} Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.557412 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7da620bcb40700069bda096a1c4711dd8401cf2478f37038a50cb531eaa33a57" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.557506 4935 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ftdr5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.670815 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5"] Dec 01 19:12:12 crc kubenswrapper[4935]: E1201 19:12:12.671286 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerName="extract-utilities" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.671303 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerName="extract-utilities" Dec 01 19:12:12 crc kubenswrapper[4935]: E1201 19:12:12.671315 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerName="registry-server" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.671322 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerName="registry-server" Dec 01 19:12:12 crc kubenswrapper[4935]: E1201 19:12:12.671330 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerName="extract-content" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.671338 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerName="extract-content" Dec 01 19:12:12 crc kubenswrapper[4935]: E1201 19:12:12.671346 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0020572-52c6-4df3-8074-935cd16a074e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.671353 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0020572-52c6-4df3-8074-935cd16a074e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.671568 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a6c3bd8-0c52-4b29-a056-a33a52d1e7b8" containerName="registry-server" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.671592 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0020572-52c6-4df3-8074-935cd16a074e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.672359 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.674725 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.674928 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.675084 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.675227 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.675931 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.676817 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.681615 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5"] Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.793770 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trkmw\" (UniqueName: \"kubernetes.io/projected/bc10e14c-8acc-448c-addc-745b67376f6e-kube-api-access-trkmw\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.794198 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.794459 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.794807 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.794859 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.794925 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.897370 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trkmw\" (UniqueName: \"kubernetes.io/projected/bc10e14c-8acc-448c-addc-745b67376f6e-kube-api-access-trkmw\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.898044 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.898314 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.899272 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.899489 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.899648 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.903487 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.904502 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.907672 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.919853 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.924364 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trkmw\" (UniqueName: \"kubernetes.io/projected/bc10e14c-8acc-448c-addc-745b67376f6e-kube-api-access-trkmw\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.924625 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:12 crc kubenswrapper[4935]: I1201 19:12:12.987927 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:12:13 crc kubenswrapper[4935]: I1201 19:12:13.549880 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5"] Dec 01 19:12:13 crc kubenswrapper[4935]: I1201 19:12:13.581340 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" event={"ID":"bc10e14c-8acc-448c-addc-745b67376f6e","Type":"ContainerStarted","Data":"9dbca6140943498c19bc9a4607bb5d17d93bf4d0517d265a5a8b5644fb744b37"} Dec 01 19:12:15 crc kubenswrapper[4935]: I1201 19:12:15.619036 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" event={"ID":"bc10e14c-8acc-448c-addc-745b67376f6e","Type":"ContainerStarted","Data":"5ab10f411a5fbe6004cc3a5d77edcd65e6d29a0a9295457e01f1b3238b19148f"} Dec 01 19:12:15 crc kubenswrapper[4935]: I1201 19:12:15.643030 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" podStartSLOduration=2.670761746 podStartE2EDuration="3.642998972s" podCreationTimestamp="2025-12-01 19:12:12 +0000 UTC" firstStartedPulling="2025-12-01 19:12:13.550728217 +0000 UTC m=+2547.572357486" lastFinishedPulling="2025-12-01 19:12:14.522965443 +0000 UTC m=+2548.544594712" observedRunningTime="2025-12-01 19:12:15.635479645 +0000 UTC m=+2549.657108954" watchObservedRunningTime="2025-12-01 19:12:15.642998972 +0000 UTC m=+2549.664628241" Dec 01 19:12:25 crc kubenswrapper[4935]: I1201 19:12:25.509371 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:12:26 crc kubenswrapper[4935]: I1201 19:12:26.806119 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"f2948ef4f4214b0864b73ab5c34e54e081d188d3aac2e23446397b144967d996"} Dec 01 19:13:11 crc kubenswrapper[4935]: I1201 19:13:11.414390 4935 generic.go:334] "Generic (PLEG): container finished" podID="bc10e14c-8acc-448c-addc-745b67376f6e" containerID="5ab10f411a5fbe6004cc3a5d77edcd65e6d29a0a9295457e01f1b3238b19148f" exitCode=0 Dec 01 19:13:11 crc kubenswrapper[4935]: I1201 19:13:11.414534 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" event={"ID":"bc10e14c-8acc-448c-addc-745b67376f6e","Type":"ContainerDied","Data":"5ab10f411a5fbe6004cc3a5d77edcd65e6d29a0a9295457e01f1b3238b19148f"} Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.061826 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.172852 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-metadata-combined-ca-bundle\") pod \"bc10e14c-8acc-448c-addc-745b67376f6e\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.173417 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-inventory\") pod \"bc10e14c-8acc-448c-addc-745b67376f6e\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.173635 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-ssh-key\") pod \"bc10e14c-8acc-448c-addc-745b67376f6e\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.173722 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"bc10e14c-8acc-448c-addc-745b67376f6e\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.173814 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trkmw\" (UniqueName: \"kubernetes.io/projected/bc10e14c-8acc-448c-addc-745b67376f6e-kube-api-access-trkmw\") pod \"bc10e14c-8acc-448c-addc-745b67376f6e\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.174105 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-nova-metadata-neutron-config-0\") pod \"bc10e14c-8acc-448c-addc-745b67376f6e\" (UID: \"bc10e14c-8acc-448c-addc-745b67376f6e\") " Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.178001 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "bc10e14c-8acc-448c-addc-745b67376f6e" (UID: "bc10e14c-8acc-448c-addc-745b67376f6e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.184399 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc10e14c-8acc-448c-addc-745b67376f6e-kube-api-access-trkmw" (OuterVolumeSpecName: "kube-api-access-trkmw") pod "bc10e14c-8acc-448c-addc-745b67376f6e" (UID: "bc10e14c-8acc-448c-addc-745b67376f6e"). InnerVolumeSpecName "kube-api-access-trkmw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.210507 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "bc10e14c-8acc-448c-addc-745b67376f6e" (UID: "bc10e14c-8acc-448c-addc-745b67376f6e"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.221702 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "bc10e14c-8acc-448c-addc-745b67376f6e" (UID: "bc10e14c-8acc-448c-addc-745b67376f6e"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.224276 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-inventory" (OuterVolumeSpecName: "inventory") pod "bc10e14c-8acc-448c-addc-745b67376f6e" (UID: "bc10e14c-8acc-448c-addc-745b67376f6e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.225808 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bc10e14c-8acc-448c-addc-745b67376f6e" (UID: "bc10e14c-8acc-448c-addc-745b67376f6e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.277103 4935 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.277137 4935 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.277169 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.277183 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.277195 4935 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bc10e14c-8acc-448c-addc-745b67376f6e-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.277209 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trkmw\" (UniqueName: \"kubernetes.io/projected/bc10e14c-8acc-448c-addc-745b67376f6e-kube-api-access-trkmw\") on node \"crc\" DevicePath \"\"" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.447264 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" event={"ID":"bc10e14c-8acc-448c-addc-745b67376f6e","Type":"ContainerDied","Data":"9dbca6140943498c19bc9a4607bb5d17d93bf4d0517d265a5a8b5644fb744b37"} Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.447346 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dbca6140943498c19bc9a4607bb5d17d93bf4d0517d265a5a8b5644fb744b37" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.447434 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.558683 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm"] Dec 01 19:13:13 crc kubenswrapper[4935]: E1201 19:13:13.559435 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc10e14c-8acc-448c-addc-745b67376f6e" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.559463 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc10e14c-8acc-448c-addc-745b67376f6e" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.559856 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc10e14c-8acc-448c-addc-745b67376f6e" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.561088 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.563363 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.563372 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.564169 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.564207 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.571364 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.574220 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm"] Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.687019 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.687094 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.687172 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.687252 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdf6t\" (UniqueName: \"kubernetes.io/projected/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-kube-api-access-sdf6t\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.687289 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.789263 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.789321 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.789380 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.789454 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdf6t\" (UniqueName: \"kubernetes.io/projected/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-kube-api-access-sdf6t\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.789487 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.796254 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.796734 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.797198 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.802330 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.814795 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdf6t\" (UniqueName: \"kubernetes.io/projected/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-kube-api-access-sdf6t\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-szrdm\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:13 crc kubenswrapper[4935]: I1201 19:13:13.881582 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:13:14 crc kubenswrapper[4935]: I1201 19:13:14.529693 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm"] Dec 01 19:13:15 crc kubenswrapper[4935]: I1201 19:13:15.477434 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" event={"ID":"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd","Type":"ContainerStarted","Data":"8372783f10d8c5085ef07ebdbeb5f9fef098e8d7a5c8fa26cddfd1ebc7c8577a"} Dec 01 19:13:16 crc kubenswrapper[4935]: I1201 19:13:16.491385 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" event={"ID":"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd","Type":"ContainerStarted","Data":"28421b70af018def94b2dde45518abd259974a14267e7577787aa1adca542b55"} Dec 01 19:13:16 crc kubenswrapper[4935]: I1201 19:13:16.520277 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" podStartSLOduration=2.761378991 podStartE2EDuration="3.520247081s" podCreationTimestamp="2025-12-01 19:13:13 +0000 UTC" firstStartedPulling="2025-12-01 19:13:14.530447645 +0000 UTC m=+2608.552076934" lastFinishedPulling="2025-12-01 19:13:15.289315725 +0000 UTC m=+2609.310945024" observedRunningTime="2025-12-01 19:13:16.517272346 +0000 UTC m=+2610.538901635" watchObservedRunningTime="2025-12-01 19:13:16.520247081 +0000 UTC m=+2610.541876370" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.093124 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vnc7d"] Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.097042 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.109002 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnc7d"] Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.277525 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-utilities\") pod \"redhat-marketplace-vnc7d\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.277590 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh4xq\" (UniqueName: \"kubernetes.io/projected/c27203b4-93da-4937-a8ff-f22695b745f1-kube-api-access-vh4xq\") pod \"redhat-marketplace-vnc7d\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.278038 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-catalog-content\") pod \"redhat-marketplace-vnc7d\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.380727 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-catalog-content\") pod \"redhat-marketplace-vnc7d\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.380933 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-utilities\") pod \"redhat-marketplace-vnc7d\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.380957 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh4xq\" (UniqueName: \"kubernetes.io/projected/c27203b4-93da-4937-a8ff-f22695b745f1-kube-api-access-vh4xq\") pod \"redhat-marketplace-vnc7d\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.381771 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-catalog-content\") pod \"redhat-marketplace-vnc7d\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.382062 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-utilities\") pod \"redhat-marketplace-vnc7d\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.409019 4935 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vh4xq\" (UniqueName: \"kubernetes.io/projected/c27203b4-93da-4937-a8ff-f22695b745f1-kube-api-access-vh4xq\") pod \"redhat-marketplace-vnc7d\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.429572 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:35 crc kubenswrapper[4935]: I1201 19:14:35.982906 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnc7d"] Dec 01 19:14:36 crc kubenswrapper[4935]: I1201 19:14:36.832900 4935 generic.go:334] "Generic (PLEG): container finished" podID="c27203b4-93da-4937-a8ff-f22695b745f1" containerID="192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298" exitCode=0 Dec 01 19:14:36 crc kubenswrapper[4935]: I1201 19:14:36.832975 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnc7d" event={"ID":"c27203b4-93da-4937-a8ff-f22695b745f1","Type":"ContainerDied","Data":"192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298"} Dec 01 19:14:36 crc kubenswrapper[4935]: I1201 19:14:36.833271 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnc7d" event={"ID":"c27203b4-93da-4937-a8ff-f22695b745f1","Type":"ContainerStarted","Data":"0b58bc7bc1da3867f40179297c8fb560f2af90432e0fe5ef6fd069bb979c0cd4"} Dec 01 19:14:36 crc kubenswrapper[4935]: I1201 19:14:36.835077 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:14:38 crc kubenswrapper[4935]: I1201 19:14:38.862961 4935 generic.go:334] "Generic (PLEG): container finished" podID="c27203b4-93da-4937-a8ff-f22695b745f1" containerID="bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5" exitCode=0 Dec 01 19:14:38 crc kubenswrapper[4935]: I1201 19:14:38.863196 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnc7d" event={"ID":"c27203b4-93da-4937-a8ff-f22695b745f1","Type":"ContainerDied","Data":"bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5"} Dec 01 19:14:39 crc kubenswrapper[4935]: I1201 19:14:39.876408 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnc7d" event={"ID":"c27203b4-93da-4937-a8ff-f22695b745f1","Type":"ContainerStarted","Data":"3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23"} Dec 01 19:14:39 crc kubenswrapper[4935]: I1201 19:14:39.912766 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vnc7d" podStartSLOduration=2.364983036 podStartE2EDuration="4.912738148s" podCreationTimestamp="2025-12-01 19:14:35 +0000 UTC" firstStartedPulling="2025-12-01 19:14:36.834755129 +0000 UTC m=+2690.856384408" lastFinishedPulling="2025-12-01 19:14:39.382510221 +0000 UTC m=+2693.404139520" observedRunningTime="2025-12-01 19:14:39.901998818 +0000 UTC m=+2693.923628077" watchObservedRunningTime="2025-12-01 19:14:39.912738148 +0000 UTC m=+2693.934367447" Dec 01 19:14:45 crc kubenswrapper[4935]: I1201 19:14:45.431289 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:45 crc kubenswrapper[4935]: I1201 19:14:45.432118 4935 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:45 crc kubenswrapper[4935]: I1201 19:14:45.516661 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:46 crc kubenswrapper[4935]: I1201 19:14:46.053321 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:46 crc kubenswrapper[4935]: I1201 19:14:46.126317 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnc7d"] Dec 01 19:14:47 crc kubenswrapper[4935]: I1201 19:14:47.987005 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vnc7d" podUID="c27203b4-93da-4937-a8ff-f22695b745f1" containerName="registry-server" containerID="cri-o://3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23" gracePeriod=2 Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.524259 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.662781 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-catalog-content\") pod \"c27203b4-93da-4937-a8ff-f22695b745f1\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.662861 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vh4xq\" (UniqueName: \"kubernetes.io/projected/c27203b4-93da-4937-a8ff-f22695b745f1-kube-api-access-vh4xq\") pod \"c27203b4-93da-4937-a8ff-f22695b745f1\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.663049 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-utilities\") pod \"c27203b4-93da-4937-a8ff-f22695b745f1\" (UID: \"c27203b4-93da-4937-a8ff-f22695b745f1\") " Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.664636 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-utilities" (OuterVolumeSpecName: "utilities") pod "c27203b4-93da-4937-a8ff-f22695b745f1" (UID: "c27203b4-93da-4937-a8ff-f22695b745f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.671495 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c27203b4-93da-4937-a8ff-f22695b745f1-kube-api-access-vh4xq" (OuterVolumeSpecName: "kube-api-access-vh4xq") pod "c27203b4-93da-4937-a8ff-f22695b745f1" (UID: "c27203b4-93da-4937-a8ff-f22695b745f1"). InnerVolumeSpecName "kube-api-access-vh4xq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.686314 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c27203b4-93da-4937-a8ff-f22695b745f1" (UID: "c27203b4-93da-4937-a8ff-f22695b745f1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.766197 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.766232 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27203b4-93da-4937-a8ff-f22695b745f1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:14:48 crc kubenswrapper[4935]: I1201 19:14:48.766246 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vh4xq\" (UniqueName: \"kubernetes.io/projected/c27203b4-93da-4937-a8ff-f22695b745f1-kube-api-access-vh4xq\") on node \"crc\" DevicePath \"\"" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.000109 4935 generic.go:334] "Generic (PLEG): container finished" podID="c27203b4-93da-4937-a8ff-f22695b745f1" containerID="3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23" exitCode=0 Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.000220 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnc7d" event={"ID":"c27203b4-93da-4937-a8ff-f22695b745f1","Type":"ContainerDied","Data":"3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23"} Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.000556 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnc7d" event={"ID":"c27203b4-93da-4937-a8ff-f22695b745f1","Type":"ContainerDied","Data":"0b58bc7bc1da3867f40179297c8fb560f2af90432e0fe5ef6fd069bb979c0cd4"} Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.000593 4935 scope.go:117] "RemoveContainer" containerID="3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.000327 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnc7d" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.051308 4935 scope.go:117] "RemoveContainer" containerID="bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.051479 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnc7d"] Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.066525 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnc7d"] Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.079267 4935 scope.go:117] "RemoveContainer" containerID="192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.133782 4935 scope.go:117] "RemoveContainer" containerID="3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23" Dec 01 19:14:49 crc kubenswrapper[4935]: E1201 19:14:49.134392 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23\": container with ID starting with 3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23 not found: ID does not exist" containerID="3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.134614 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23"} err="failed to get container status \"3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23\": rpc error: code = NotFound desc = could not find container \"3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23\": container with ID starting with 3fd9613f8f5ad78e226e63f368a4a622b3c14db72805581b7129a1038ca38c23 not found: ID does not exist" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.134787 4935 scope.go:117] "RemoveContainer" containerID="bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5" Dec 01 19:14:49 crc kubenswrapper[4935]: E1201 19:14:49.135416 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5\": container with ID starting with bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5 not found: ID does not exist" containerID="bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.135576 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5"} err="failed to get container status \"bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5\": rpc error: code = NotFound desc = could not find container \"bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5\": container with ID starting with bff178bedb85f2609e7d5b46d30c08e031dcbdf0148760d084a032cefde14be5 not found: ID does not exist" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.135744 4935 scope.go:117] "RemoveContainer" containerID="192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298" Dec 01 19:14:49 crc kubenswrapper[4935]: E1201 19:14:49.136458 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298\": container with ID starting with 192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298 not found: ID does not exist" containerID="192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298" Dec 01 19:14:49 crc kubenswrapper[4935]: I1201 19:14:49.136492 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298"} err="failed to get container status \"192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298\": rpc error: code = NotFound desc = could not find container \"192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298\": container with ID starting with 192ed1e3f86098ca61ab80b0059d39561fe3d46c1b747f2208123d9e378ce298 not found: ID does not exist" Dec 01 19:14:50 crc kubenswrapper[4935]: I1201 19:14:50.532954 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c27203b4-93da-4937-a8ff-f22695b745f1" path="/var/lib/kubelet/pods/c27203b4-93da-4937-a8ff-f22695b745f1/volumes" Dec 01 19:14:54 crc kubenswrapper[4935]: I1201 19:14:54.346042 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:14:54 crc kubenswrapper[4935]: I1201 19:14:54.346735 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.184319 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb"] Dec 01 19:15:00 crc kubenswrapper[4935]: E1201 19:15:00.195901 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27203b4-93da-4937-a8ff-f22695b745f1" containerName="extract-utilities" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.195944 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c27203b4-93da-4937-a8ff-f22695b745f1" containerName="extract-utilities" Dec 01 19:15:00 crc kubenswrapper[4935]: E1201 19:15:00.196674 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27203b4-93da-4937-a8ff-f22695b745f1" containerName="extract-content" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.196688 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c27203b4-93da-4937-a8ff-f22695b745f1" containerName="extract-content" Dec 01 19:15:00 crc kubenswrapper[4935]: E1201 19:15:00.196715 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27203b4-93da-4937-a8ff-f22695b745f1" containerName="registry-server" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.196722 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="c27203b4-93da-4937-a8ff-f22695b745f1" containerName="registry-server" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.197561 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="c27203b4-93da-4937-a8ff-f22695b745f1" containerName="registry-server" Dec 01 19:15:00 crc 
kubenswrapper[4935]: I1201 19:15:00.199139 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.201697 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.201805 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.220944 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb"] Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.292317 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6011b2ab-6f11-47bf-98ef-dabde15ec278-secret-volume\") pod \"collect-profiles-29410275-lzcjb\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.292445 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86xk4\" (UniqueName: \"kubernetes.io/projected/6011b2ab-6f11-47bf-98ef-dabde15ec278-kube-api-access-86xk4\") pod \"collect-profiles-29410275-lzcjb\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.292615 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6011b2ab-6f11-47bf-98ef-dabde15ec278-config-volume\") pod \"collect-profiles-29410275-lzcjb\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.395270 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6011b2ab-6f11-47bf-98ef-dabde15ec278-config-volume\") pod \"collect-profiles-29410275-lzcjb\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.395792 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6011b2ab-6f11-47bf-98ef-dabde15ec278-secret-volume\") pod \"collect-profiles-29410275-lzcjb\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.396411 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86xk4\" (UniqueName: \"kubernetes.io/projected/6011b2ab-6f11-47bf-98ef-dabde15ec278-kube-api-access-86xk4\") pod \"collect-profiles-29410275-lzcjb\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.397221 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/6011b2ab-6f11-47bf-98ef-dabde15ec278-config-volume\") pod \"collect-profiles-29410275-lzcjb\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.405003 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6011b2ab-6f11-47bf-98ef-dabde15ec278-secret-volume\") pod \"collect-profiles-29410275-lzcjb\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.414017 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86xk4\" (UniqueName: \"kubernetes.io/projected/6011b2ab-6f11-47bf-98ef-dabde15ec278-kube-api-access-86xk4\") pod \"collect-profiles-29410275-lzcjb\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.532349 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:00 crc kubenswrapper[4935]: I1201 19:15:00.996896 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb"] Dec 01 19:15:00 crc kubenswrapper[4935]: W1201 19:15:00.999137 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6011b2ab_6f11_47bf_98ef_dabde15ec278.slice/crio-0678df4a6eb6fe79e05fa7778d3b6033f75d44aec98746ff4956511bd966e8ac WatchSource:0}: Error finding container 0678df4a6eb6fe79e05fa7778d3b6033f75d44aec98746ff4956511bd966e8ac: Status 404 returned error can't find the container with id 0678df4a6eb6fe79e05fa7778d3b6033f75d44aec98746ff4956511bd966e8ac Dec 01 19:15:01 crc kubenswrapper[4935]: I1201 19:15:01.168082 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" event={"ID":"6011b2ab-6f11-47bf-98ef-dabde15ec278","Type":"ContainerStarted","Data":"0678df4a6eb6fe79e05fa7778d3b6033f75d44aec98746ff4956511bd966e8ac"} Dec 01 19:15:02 crc kubenswrapper[4935]: I1201 19:15:02.179073 4935 generic.go:334] "Generic (PLEG): container finished" podID="6011b2ab-6f11-47bf-98ef-dabde15ec278" containerID="41b9560bfd51da66c9b62dc3530eb876f12181193b636a2f6ad73e5acbcbafd6" exitCode=0 Dec 01 19:15:02 crc kubenswrapper[4935]: I1201 19:15:02.179194 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" event={"ID":"6011b2ab-6f11-47bf-98ef-dabde15ec278","Type":"ContainerDied","Data":"41b9560bfd51da66c9b62dc3530eb876f12181193b636a2f6ad73e5acbcbafd6"} Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.589890 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.670061 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6011b2ab-6f11-47bf-98ef-dabde15ec278-secret-volume\") pod \"6011b2ab-6f11-47bf-98ef-dabde15ec278\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.670461 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6011b2ab-6f11-47bf-98ef-dabde15ec278-config-volume\") pod \"6011b2ab-6f11-47bf-98ef-dabde15ec278\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.670861 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86xk4\" (UniqueName: \"kubernetes.io/projected/6011b2ab-6f11-47bf-98ef-dabde15ec278-kube-api-access-86xk4\") pod \"6011b2ab-6f11-47bf-98ef-dabde15ec278\" (UID: \"6011b2ab-6f11-47bf-98ef-dabde15ec278\") " Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.671361 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6011b2ab-6f11-47bf-98ef-dabde15ec278-config-volume" (OuterVolumeSpecName: "config-volume") pod "6011b2ab-6f11-47bf-98ef-dabde15ec278" (UID: "6011b2ab-6f11-47bf-98ef-dabde15ec278"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.672281 4935 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6011b2ab-6f11-47bf-98ef-dabde15ec278-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.676974 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6011b2ab-6f11-47bf-98ef-dabde15ec278-kube-api-access-86xk4" (OuterVolumeSpecName: "kube-api-access-86xk4") pod "6011b2ab-6f11-47bf-98ef-dabde15ec278" (UID: "6011b2ab-6f11-47bf-98ef-dabde15ec278"). InnerVolumeSpecName "kube-api-access-86xk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.677228 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6011b2ab-6f11-47bf-98ef-dabde15ec278-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6011b2ab-6f11-47bf-98ef-dabde15ec278" (UID: "6011b2ab-6f11-47bf-98ef-dabde15ec278"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.774251 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86xk4\" (UniqueName: \"kubernetes.io/projected/6011b2ab-6f11-47bf-98ef-dabde15ec278-kube-api-access-86xk4\") on node \"crc\" DevicePath \"\"" Dec 01 19:15:03 crc kubenswrapper[4935]: I1201 19:15:03.774285 4935 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6011b2ab-6f11-47bf-98ef-dabde15ec278-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:15:04 crc kubenswrapper[4935]: I1201 19:15:04.207060 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" event={"ID":"6011b2ab-6f11-47bf-98ef-dabde15ec278","Type":"ContainerDied","Data":"0678df4a6eb6fe79e05fa7778d3b6033f75d44aec98746ff4956511bd966e8ac"} Dec 01 19:15:04 crc kubenswrapper[4935]: I1201 19:15:04.207103 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0678df4a6eb6fe79e05fa7778d3b6033f75d44aec98746ff4956511bd966e8ac" Dec 01 19:15:04 crc kubenswrapper[4935]: I1201 19:15:04.207126 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb" Dec 01 19:15:04 crc kubenswrapper[4935]: I1201 19:15:04.703725 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b"] Dec 01 19:15:04 crc kubenswrapper[4935]: I1201 19:15:04.716392 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410230-xt77b"] Dec 01 19:15:06 crc kubenswrapper[4935]: I1201 19:15:06.521509 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8bac674-5c61-4782-9f74-6374a430e7fc" path="/var/lib/kubelet/pods/d8bac674-5c61-4782-9f74-6374a430e7fc/volumes" Dec 01 19:15:10 crc kubenswrapper[4935]: I1201 19:15:10.467555 4935 scope.go:117] "RemoveContainer" containerID="1c23ef9b6e2724226042c04f6e05dbd2c565564ebae99b96f2638c912d1264fc" Dec 01 19:15:24 crc kubenswrapper[4935]: I1201 19:15:24.346289 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:15:24 crc kubenswrapper[4935]: I1201 19:15:24.346933 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:15:54 crc kubenswrapper[4935]: I1201 19:15:54.345728 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:15:54 crc kubenswrapper[4935]: I1201 19:15:54.346317 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:15:54 crc kubenswrapper[4935]: I1201 19:15:54.346364 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:15:54 crc kubenswrapper[4935]: I1201 19:15:54.347345 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f2948ef4f4214b0864b73ab5c34e54e081d188d3aac2e23446397b144967d996"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:15:54 crc kubenswrapper[4935]: I1201 19:15:54.347417 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://f2948ef4f4214b0864b73ab5c34e54e081d188d3aac2e23446397b144967d996" gracePeriod=600 Dec 01 19:15:54 crc kubenswrapper[4935]: I1201 19:15:54.876634 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="f2948ef4f4214b0864b73ab5c34e54e081d188d3aac2e23446397b144967d996" exitCode=0 Dec 01 19:15:54 crc kubenswrapper[4935]: I1201 19:15:54.877277 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"f2948ef4f4214b0864b73ab5c34e54e081d188d3aac2e23446397b144967d996"} Dec 01 19:15:54 crc kubenswrapper[4935]: I1201 19:15:54.877306 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf"} Dec 01 19:15:54 crc kubenswrapper[4935]: I1201 19:15:54.877323 4935 scope.go:117] "RemoveContainer" containerID="c3ee1d3232bc0fc96e009bf27eb40ebbfdd6faf58d6ca37ff3acf9e8cf2f25d4" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.436637 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5n94v"] Dec 01 19:16:37 crc kubenswrapper[4935]: E1201 19:16:37.437622 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6011b2ab-6f11-47bf-98ef-dabde15ec278" containerName="collect-profiles" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.437635 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6011b2ab-6f11-47bf-98ef-dabde15ec278" containerName="collect-profiles" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.437902 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="6011b2ab-6f11-47bf-98ef-dabde15ec278" containerName="collect-profiles" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.439568 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.456364 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5n94v"] Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.578350 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-catalog-content\") pod \"community-operators-5n94v\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.579035 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8w6j\" (UniqueName: \"kubernetes.io/projected/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-kube-api-access-f8w6j\") pod \"community-operators-5n94v\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.579111 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-utilities\") pod \"community-operators-5n94v\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.681583 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8w6j\" (UniqueName: \"kubernetes.io/projected/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-kube-api-access-f8w6j\") pod \"community-operators-5n94v\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.681631 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-utilities\") pod \"community-operators-5n94v\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.681791 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-catalog-content\") pod \"community-operators-5n94v\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.682194 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-utilities\") pod \"community-operators-5n94v\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.682266 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-catalog-content\") pod \"community-operators-5n94v\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.699905 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f8w6j\" (UniqueName: \"kubernetes.io/projected/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-kube-api-access-f8w6j\") pod \"community-operators-5n94v\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:37 crc kubenswrapper[4935]: I1201 19:16:37.756782 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:38 crc kubenswrapper[4935]: I1201 19:16:38.338481 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5n94v"] Dec 01 19:16:38 crc kubenswrapper[4935]: I1201 19:16:38.393976 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5n94v" event={"ID":"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d","Type":"ContainerStarted","Data":"40e16471769114357bc446916c52bf4d4a9c72c8ea70c22c27a002db6616b3f5"} Dec 01 19:16:39 crc kubenswrapper[4935]: I1201 19:16:39.413641 4935 generic.go:334] "Generic (PLEG): container finished" podID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerID="a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca" exitCode=0 Dec 01 19:16:39 crc kubenswrapper[4935]: I1201 19:16:39.413747 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5n94v" event={"ID":"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d","Type":"ContainerDied","Data":"a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca"} Dec 01 19:16:41 crc kubenswrapper[4935]: I1201 19:16:41.438343 4935 generic.go:334] "Generic (PLEG): container finished" podID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerID="cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d" exitCode=0 Dec 01 19:16:41 crc kubenswrapper[4935]: I1201 19:16:41.438431 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5n94v" event={"ID":"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d","Type":"ContainerDied","Data":"cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d"} Dec 01 19:16:42 crc kubenswrapper[4935]: I1201 19:16:42.478132 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5n94v" podStartSLOduration=2.6921885899999998 podStartE2EDuration="5.478102337s" podCreationTimestamp="2025-12-01 19:16:37 +0000 UTC" firstStartedPulling="2025-12-01 19:16:39.417428115 +0000 UTC m=+2813.439057374" lastFinishedPulling="2025-12-01 19:16:42.203341842 +0000 UTC m=+2816.224971121" observedRunningTime="2025-12-01 19:16:42.474767251 +0000 UTC m=+2816.496396520" watchObservedRunningTime="2025-12-01 19:16:42.478102337 +0000 UTC m=+2816.499731606" Dec 01 19:16:43 crc kubenswrapper[4935]: I1201 19:16:43.467125 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5n94v" event={"ID":"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d","Type":"ContainerStarted","Data":"990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5"} Dec 01 19:16:47 crc kubenswrapper[4935]: I1201 19:16:47.757205 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:47 crc kubenswrapper[4935]: I1201 19:16:47.757735 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:47 crc kubenswrapper[4935]: I1201 
19:16:47.849967 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:48 crc kubenswrapper[4935]: I1201 19:16:48.634136 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:48 crc kubenswrapper[4935]: I1201 19:16:48.690651 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5n94v"] Dec 01 19:16:50 crc kubenswrapper[4935]: I1201 19:16:50.778746 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5n94v" podUID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerName="registry-server" containerID="cri-o://990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5" gracePeriod=2 Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.455235 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.587197 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-catalog-content\") pod \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.587725 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8w6j\" (UniqueName: \"kubernetes.io/projected/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-kube-api-access-f8w6j\") pod \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.587854 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-utilities\") pod \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\" (UID: \"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d\") " Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.588641 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-utilities" (OuterVolumeSpecName: "utilities") pod "abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" (UID: "abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.589450 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.593579 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-kube-api-access-f8w6j" (OuterVolumeSpecName: "kube-api-access-f8w6j") pod "abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" (UID: "abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d"). InnerVolumeSpecName "kube-api-access-f8w6j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.641009 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" (UID: "abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.691785 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8w6j\" (UniqueName: \"kubernetes.io/projected/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-kube-api-access-f8w6j\") on node \"crc\" DevicePath \"\"" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.691823 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.790505 4935 generic.go:334] "Generic (PLEG): container finished" podID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerID="990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5" exitCode=0 Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.790554 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5n94v" event={"ID":"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d","Type":"ContainerDied","Data":"990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5"} Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.790588 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5n94v" event={"ID":"abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d","Type":"ContainerDied","Data":"40e16471769114357bc446916c52bf4d4a9c72c8ea70c22c27a002db6616b3f5"} Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.790587 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5n94v" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.790601 4935 scope.go:117] "RemoveContainer" containerID="990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.819180 4935 scope.go:117] "RemoveContainer" containerID="cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.828121 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5n94v"] Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.847484 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5n94v"] Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.863765 4935 scope.go:117] "RemoveContainer" containerID="a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.893253 4935 scope.go:117] "RemoveContainer" containerID="990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5" Dec 01 19:16:51 crc kubenswrapper[4935]: E1201 19:16:51.894998 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5\": container with ID starting with 990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5 not found: ID does not exist" containerID="990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.895048 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5"} err="failed to get container status \"990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5\": rpc error: code = NotFound desc = could not find container \"990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5\": container with ID starting with 990c590ac300000db279611f080013ae18c550982ad065369e2e95b5e951d1d5 not found: ID does not exist" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.895080 4935 scope.go:117] "RemoveContainer" containerID="cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d" Dec 01 19:16:51 crc kubenswrapper[4935]: E1201 19:16:51.895711 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d\": container with ID starting with cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d not found: ID does not exist" containerID="cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.895763 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d"} err="failed to get container status \"cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d\": rpc error: code = NotFound desc = could not find container \"cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d\": container with ID starting with cc02c4aa14570e39d6dd0eb2091aeb628c3ce150fca57f6055e5f549376ca66d not found: ID does not exist" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.895802 4935 scope.go:117] "RemoveContainer" 
containerID="a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca" Dec 01 19:16:51 crc kubenswrapper[4935]: E1201 19:16:51.896291 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca\": container with ID starting with a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca not found: ID does not exist" containerID="a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca" Dec 01 19:16:51 crc kubenswrapper[4935]: I1201 19:16:51.896335 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca"} err="failed to get container status \"a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca\": rpc error: code = NotFound desc = could not find container \"a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca\": container with ID starting with a88523f908ff0836ac87db8001f37d2ac5cc6177dd727fc3b03eb7e035bca1ca not found: ID does not exist" Dec 01 19:16:52 crc kubenswrapper[4935]: I1201 19:16:52.523448 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" path="/var/lib/kubelet/pods/abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d/volumes" Dec 01 19:17:38 crc kubenswrapper[4935]: E1201 19:17:38.047282 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod666d7f3a_f7f0_456e_b027_e68d2d8b1dbd.slice/crio-conmon-28421b70af018def94b2dde45518abd259974a14267e7577787aa1adca542b55.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:17:38 crc kubenswrapper[4935]: I1201 19:17:38.422806 4935 generic.go:334] "Generic (PLEG): container finished" podID="666d7f3a-f7f0-456e-b027-e68d2d8b1dbd" containerID="28421b70af018def94b2dde45518abd259974a14267e7577787aa1adca542b55" exitCode=0 Dec 01 19:17:38 crc kubenswrapper[4935]: I1201 19:17:38.422871 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" event={"ID":"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd","Type":"ContainerDied","Data":"28421b70af018def94b2dde45518abd259974a14267e7577787aa1adca542b55"} Dec 01 19:17:39 crc kubenswrapper[4935]: I1201 19:17:39.982119 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.017323 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-combined-ca-bundle\") pod \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.017470 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdf6t\" (UniqueName: \"kubernetes.io/projected/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-kube-api-access-sdf6t\") pod \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.017528 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-inventory\") pod \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.017586 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-ssh-key\") pod \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.018394 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-secret-0\") pod \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\" (UID: \"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd\") " Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.025215 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd" (UID: "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.025454 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-kube-api-access-sdf6t" (OuterVolumeSpecName: "kube-api-access-sdf6t") pod "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd" (UID: "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd"). InnerVolumeSpecName "kube-api-access-sdf6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.053450 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd" (UID: "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.054770 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd" (UID: "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.065334 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-inventory" (OuterVolumeSpecName: "inventory") pod "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd" (UID: "666d7f3a-f7f0-456e-b027-e68d2d8b1dbd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.121605 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.121668 4935 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.121689 4935 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.121703 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdf6t\" (UniqueName: \"kubernetes.io/projected/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-kube-api-access-sdf6t\") on node \"crc\" DevicePath \"\"" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.121716 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/666d7f3a-f7f0-456e-b027-e68d2d8b1dbd-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.451607 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" event={"ID":"666d7f3a-f7f0-456e-b027-e68d2d8b1dbd","Type":"ContainerDied","Data":"8372783f10d8c5085ef07ebdbeb5f9fef098e8d7a5c8fa26cddfd1ebc7c8577a"} Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.451649 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8372783f10d8c5085ef07ebdbeb5f9fef098e8d7a5c8fa26cddfd1ebc7c8577a" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.451675 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-szrdm" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.563990 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn"] Dec 01 19:17:40 crc kubenswrapper[4935]: E1201 19:17:40.564794 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerName="extract-content" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.564825 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerName="extract-content" Dec 01 19:17:40 crc kubenswrapper[4935]: E1201 19:17:40.564862 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerName="extract-utilities" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.564874 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerName="extract-utilities" Dec 01 19:17:40 crc kubenswrapper[4935]: E1201 19:17:40.564901 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerName="registry-server" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.564912 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerName="registry-server" Dec 01 19:17:40 crc kubenswrapper[4935]: E1201 19:17:40.564937 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="666d7f3a-f7f0-456e-b027-e68d2d8b1dbd" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.564949 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="666d7f3a-f7f0-456e-b027-e68d2d8b1dbd" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.565313 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="666d7f3a-f7f0-456e-b027-e68d2d8b1dbd" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.565364 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd02dfb-43f9-4d4d-a4e4-9d9ce67d983d" containerName="registry-server" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.566639 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.571405 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.571524 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.571580 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.572214 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.572241 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.572459 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.572729 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.589222 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn"] Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.739898 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn9dr\" (UniqueName: \"kubernetes.io/projected/32e713b7-2006-4964-8c35-9b884a10c3d3-kube-api-access-jn9dr\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.740372 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.740641 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.740763 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.740821 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.740863 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.740998 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.741103 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.741302 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.843701 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn9dr\" (UniqueName: \"kubernetes.io/projected/32e713b7-2006-4964-8c35-9b884a10c3d3-kube-api-access-jn9dr\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.843765 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.843844 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.843873 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.843899 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.843920 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.843974 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.844027 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.844073 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.845003 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.849812 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.849837 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.849844 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.851117 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.851363 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.851489 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.851619 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.862133 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn9dr\" (UniqueName: \"kubernetes.io/projected/32e713b7-2006-4964-8c35-9b884a10c3d3-kube-api-access-jn9dr\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9fzcn\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:40 crc kubenswrapper[4935]: I1201 19:17:40.898400 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:17:41 crc kubenswrapper[4935]: I1201 19:17:41.487855 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn"] Dec 01 19:17:41 crc kubenswrapper[4935]: W1201 19:17:41.491281 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32e713b7_2006_4964_8c35_9b884a10c3d3.slice/crio-9d6045917f00bbfe32940f10a5c4635581063c53dbb8cd971643ae6cbe24a64a WatchSource:0}: Error finding container 9d6045917f00bbfe32940f10a5c4635581063c53dbb8cd971643ae6cbe24a64a: Status 404 returned error can't find the container with id 9d6045917f00bbfe32940f10a5c4635581063c53dbb8cd971643ae6cbe24a64a Dec 01 19:17:42 crc kubenswrapper[4935]: I1201 19:17:42.482250 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" event={"ID":"32e713b7-2006-4964-8c35-9b884a10c3d3","Type":"ContainerStarted","Data":"90c605fcdaa789a808a5eac1dc7a27f873aa3ca9e03f7c02b36ba6b331cda7ac"} Dec 01 19:17:42 crc kubenswrapper[4935]: I1201 19:17:42.482598 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" event={"ID":"32e713b7-2006-4964-8c35-9b884a10c3d3","Type":"ContainerStarted","Data":"9d6045917f00bbfe32940f10a5c4635581063c53dbb8cd971643ae6cbe24a64a"} Dec 01 19:17:42 crc kubenswrapper[4935]: I1201 19:17:42.513314 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" podStartSLOduration=1.8366878660000001 podStartE2EDuration="2.513295071s" podCreationTimestamp="2025-12-01 19:17:40 +0000 UTC" firstStartedPulling="2025-12-01 19:17:41.494303439 +0000 UTC m=+2875.515932698" lastFinishedPulling="2025-12-01 19:17:42.170910644 +0000 UTC m=+2876.192539903" observedRunningTime="2025-12-01 19:17:42.509613594 +0000 UTC m=+2876.531242853" watchObservedRunningTime="2025-12-01 19:17:42.513295071 +0000 UTC m=+2876.534924340" Dec 01 19:17:54 crc kubenswrapper[4935]: I1201 19:17:54.346666 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:17:54 crc kubenswrapper[4935]: I1201 19:17:54.347438 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:18:24 crc kubenswrapper[4935]: I1201 19:18:24.346204 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:18:24 crc kubenswrapper[4935]: I1201 19:18:24.346704 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:18:54 crc kubenswrapper[4935]: I1201 19:18:54.346387 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:18:54 crc kubenswrapper[4935]: I1201 19:18:54.347071 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:18:54 crc kubenswrapper[4935]: I1201 19:18:54.347137 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:18:54 crc kubenswrapper[4935]: I1201 19:18:54.348101 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:18:54 crc kubenswrapper[4935]: I1201 19:18:54.348241 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" gracePeriod=600 Dec 01 19:18:54 crc kubenswrapper[4935]: E1201 19:18:54.481768 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:18:54 crc kubenswrapper[4935]: I1201 19:18:54.543879 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" exitCode=0 Dec 01 19:18:54 crc kubenswrapper[4935]: I1201 19:18:54.544276 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf"} Dec 01 19:18:54 crc kubenswrapper[4935]: I1201 19:18:54.544364 4935 scope.go:117] "RemoveContainer" containerID="f2948ef4f4214b0864b73ab5c34e54e081d188d3aac2e23446397b144967d996" Dec 01 19:18:54 crc kubenswrapper[4935]: I1201 19:18:54.546524 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:18:54 crc kubenswrapper[4935]: E1201 19:18:54.549067 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:19:05 crc kubenswrapper[4935]: I1201 19:19:05.509024 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:19:05 crc kubenswrapper[4935]: E1201 19:19:05.510165 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:19:17 crc kubenswrapper[4935]: I1201 19:19:17.508481 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:19:17 crc kubenswrapper[4935]: E1201 19:19:17.509781 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:19:30 crc kubenswrapper[4935]: I1201 19:19:30.510301 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:19:30 crc kubenswrapper[4935]: E1201 19:19:30.511028 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:19:44 crc kubenswrapper[4935]: I1201 19:19:44.508882 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:19:44 crc kubenswrapper[4935]: E1201 19:19:44.510036 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:19:57 crc kubenswrapper[4935]: I1201 19:19:57.508921 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:19:57 crc kubenswrapper[4935]: E1201 19:19:57.509843 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:20:12 crc kubenswrapper[4935]: I1201 19:20:12.509349 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:20:12 crc kubenswrapper[4935]: E1201 19:20:12.510604 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:20:26 crc kubenswrapper[4935]: I1201 19:20:26.522263 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:20:26 crc kubenswrapper[4935]: E1201 19:20:26.523483 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:20:37 crc kubenswrapper[4935]: I1201 19:20:37.508521 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:20:37 crc kubenswrapper[4935]: E1201 19:20:37.509488 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:20:39 crc kubenswrapper[4935]: W1201 19:20:39.983683 4935 helpers.go:245] readString: Failed to read "/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32e713b7_2006_4964_8c35_9b884a10c3d3.slice/crio-9d6045917f00bbfe32940f10a5c4635581063c53dbb8cd971643ae6cbe24a64a/memory.min": read /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32e713b7_2006_4964_8c35_9b884a10c3d3.slice/crio-9d6045917f00bbfe32940f10a5c4635581063c53dbb8cd971643ae6cbe24a64a/memory.min: no such device Dec 01 19:20:40 crc kubenswrapper[4935]: I1201 19:20:40.880200 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" event={"ID":"32e713b7-2006-4964-8c35-9b884a10c3d3","Type":"ContainerDied","Data":"90c605fcdaa789a808a5eac1dc7a27f873aa3ca9e03f7c02b36ba6b331cda7ac"} Dec 01 19:20:40 crc kubenswrapper[4935]: I1201 19:20:40.880139 4935 generic.go:334] "Generic (PLEG): container finished" podID="32e713b7-2006-4964-8c35-9b884a10c3d3" containerID="90c605fcdaa789a808a5eac1dc7a27f873aa3ca9e03f7c02b36ba6b331cda7ac" exitCode=0 Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.336585 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.404172 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-0\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.404219 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-0\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.404274 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-extra-config-0\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.404337 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-1\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.404368 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-combined-ca-bundle\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.404412 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jn9dr\" (UniqueName: \"kubernetes.io/projected/32e713b7-2006-4964-8c35-9b884a10c3d3-kube-api-access-jn9dr\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.404462 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-inventory\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.404632 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-ssh-key\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.404710 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-1\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.414929 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.420434 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32e713b7-2006-4964-8c35-9b884a10c3d3-kube-api-access-jn9dr" (OuterVolumeSpecName: "kube-api-access-jn9dr") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "kube-api-access-jn9dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.447994 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.464825 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-inventory" (OuterVolumeSpecName: "inventory") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.473917 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.473938 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.487387 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.489123 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "nova-extra-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.505568 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.506702 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-0\") pod \"32e713b7-2006-4964-8c35-9b884a10c3d3\" (UID: \"32e713b7-2006-4964-8c35-9b884a10c3d3\") " Dec 01 19:20:42 crc kubenswrapper[4935]: W1201 19:20:42.506804 4935 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/32e713b7-2006-4964-8c35-9b884a10c3d3/volumes/kubernetes.io~secret/nova-migration-ssh-key-0 Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.506814 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "32e713b7-2006-4964-8c35-9b884a10c3d3" (UID: "32e713b7-2006-4964-8c35-9b884a10c3d3"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.507916 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.507962 4935 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.507983 4935 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.508001 4935 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.508019 4935 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.508036 4935 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.508054 4935 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-nova-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.508070 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jn9dr\" (UniqueName: \"kubernetes.io/projected/32e713b7-2006-4964-8c35-9b884a10c3d3-kube-api-access-jn9dr\") on node \"crc\" DevicePath \"\"" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.508089 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/32e713b7-2006-4964-8c35-9b884a10c3d3-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.925226 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" event={"ID":"32e713b7-2006-4964-8c35-9b884a10c3d3","Type":"ContainerDied","Data":"9d6045917f00bbfe32940f10a5c4635581063c53dbb8cd971643ae6cbe24a64a"} Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.925281 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d6045917f00bbfe32940f10a5c4635581063c53dbb8cd971643ae6cbe24a64a" Dec 01 19:20:42 crc kubenswrapper[4935]: I1201 19:20:42.925362 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9fzcn" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.028133 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz"] Dec 01 19:20:43 crc kubenswrapper[4935]: E1201 19:20:43.028660 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e713b7-2006-4964-8c35-9b884a10c3d3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.028678 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e713b7-2006-4964-8c35-9b884a10c3d3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.028929 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e713b7-2006-4964-8c35-9b884a10c3d3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.029864 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.031887 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.032463 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.032466 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.032900 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.038660 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.045025 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz"] Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.126328 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.126666 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxl72\" (UniqueName: \"kubernetes.io/projected/bc25f29a-826c-4823-95c8-1bba009e771f-kube-api-access-jxl72\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.126718 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.126760 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.126798 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc 
kubenswrapper[4935]: I1201 19:20:43.126827 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.126848 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.229059 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.229235 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.229336 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.229372 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.229413 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.229583 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.229756 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxl72\" (UniqueName: \"kubernetes.io/projected/bc25f29a-826c-4823-95c8-1bba009e771f-kube-api-access-jxl72\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.234822 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.234947 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.235091 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.235857 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.236120 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.236602 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.251013 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxl72\" (UniqueName: \"kubernetes.io/projected/bc25f29a-826c-4823-95c8-1bba009e771f-kube-api-access-jxl72\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.353178 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.940337 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz"] Dec 01 19:20:43 crc kubenswrapper[4935]: I1201 19:20:43.950777 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:20:44 crc kubenswrapper[4935]: I1201 19:20:44.954563 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" event={"ID":"bc25f29a-826c-4823-95c8-1bba009e771f","Type":"ContainerStarted","Data":"56cf511702247978997a2ad0a68bb1e480f4ebffada88d7f4b9143998936975d"} Dec 01 19:20:44 crc kubenswrapper[4935]: I1201 19:20:44.955137 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" event={"ID":"bc25f29a-826c-4823-95c8-1bba009e771f","Type":"ContainerStarted","Data":"ee3d8b206d7a9a222657b29274f3dc4e92f0732aa4c64ac4e3c75b1cb1613397"} Dec 01 19:20:44 crc kubenswrapper[4935]: I1201 19:20:44.977144 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" podStartSLOduration=1.479068526 podStartE2EDuration="1.977119804s" podCreationTimestamp="2025-12-01 19:20:43 +0000 UTC" firstStartedPulling="2025-12-01 19:20:43.950532872 +0000 UTC m=+3057.972162131" lastFinishedPulling="2025-12-01 19:20:44.44858414 +0000 UTC m=+3058.470213409" observedRunningTime="2025-12-01 19:20:44.973733636 +0000 UTC m=+3058.995362935" watchObservedRunningTime="2025-12-01 19:20:44.977119804 +0000 UTC m=+3058.998749063" Dec 01 19:20:51 crc kubenswrapper[4935]: I1201 19:20:51.509503 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:20:51 crc kubenswrapper[4935]: E1201 19:20:51.510402 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:21:04 crc kubenswrapper[4935]: I1201 19:21:04.509406 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:21:04 crc kubenswrapper[4935]: E1201 19:21:04.510041 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:21:18 crc kubenswrapper[4935]: I1201 19:21:18.509318 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:21:18 crc kubenswrapper[4935]: E1201 19:21:18.510472 4935 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:21:31 crc kubenswrapper[4935]: I1201 19:21:31.508602 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:21:31 crc kubenswrapper[4935]: E1201 19:21:31.510048 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:21:44 crc kubenswrapper[4935]: I1201 19:21:44.509329 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:21:44 crc kubenswrapper[4935]: E1201 19:21:44.510294 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:21:59 crc kubenswrapper[4935]: I1201 19:21:59.509233 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:21:59 crc kubenswrapper[4935]: E1201 19:21:59.510059 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:22:11 crc kubenswrapper[4935]: I1201 19:22:11.509344 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:22:11 crc kubenswrapper[4935]: E1201 19:22:11.510820 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:22:25 crc kubenswrapper[4935]: I1201 19:22:25.508246 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:22:25 crc kubenswrapper[4935]: E1201 19:22:25.509315 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:22:32 crc kubenswrapper[4935]: I1201 19:22:32.288793 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-6b89d75d8c-8d6z5" podUID="d97b7792-f596-4358-8b02-1ae1368ac68d" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 01 19:22:39 crc kubenswrapper[4935]: I1201 19:22:39.509688 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:22:39 crc kubenswrapper[4935]: E1201 19:22:39.510912 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:22:40 crc kubenswrapper[4935]: I1201 19:22:40.946862 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qnh28"] Dec 01 19:22:40 crc kubenswrapper[4935]: I1201 19:22:40.949648 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:40 crc kubenswrapper[4935]: I1201 19:22:40.965043 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnh28"] Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.141768 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-utilities\") pod \"certified-operators-qnh28\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.141874 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-catalog-content\") pod \"certified-operators-qnh28\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.142205 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92zcd\" (UniqueName: \"kubernetes.io/projected/a951d082-52ba-4df1-b245-6362ec87d941-kube-api-access-92zcd\") pod \"certified-operators-qnh28\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.245450 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92zcd\" (UniqueName: \"kubernetes.io/projected/a951d082-52ba-4df1-b245-6362ec87d941-kube-api-access-92zcd\") pod \"certified-operators-qnh28\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.245626 4935 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-utilities\") pod \"certified-operators-qnh28\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.245684 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-catalog-content\") pod \"certified-operators-qnh28\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.246180 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-utilities\") pod \"certified-operators-qnh28\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.246220 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-catalog-content\") pod \"certified-operators-qnh28\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.268837 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92zcd\" (UniqueName: \"kubernetes.io/projected/a951d082-52ba-4df1-b245-6362ec87d941-kube-api-access-92zcd\") pod \"certified-operators-qnh28\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.284724 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:41 crc kubenswrapper[4935]: I1201 19:22:41.823605 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnh28"] Dec 01 19:22:42 crc kubenswrapper[4935]: I1201 19:22:42.759582 4935 generic.go:334] "Generic (PLEG): container finished" podID="a951d082-52ba-4df1-b245-6362ec87d941" containerID="ad4a8eb5b5155fb385015bd21a4944077d84f8a9c41b7cbd245e5fd5ee670af0" exitCode=0 Dec 01 19:22:42 crc kubenswrapper[4935]: I1201 19:22:42.759649 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnh28" event={"ID":"a951d082-52ba-4df1-b245-6362ec87d941","Type":"ContainerDied","Data":"ad4a8eb5b5155fb385015bd21a4944077d84f8a9c41b7cbd245e5fd5ee670af0"} Dec 01 19:22:42 crc kubenswrapper[4935]: I1201 19:22:42.760249 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnh28" event={"ID":"a951d082-52ba-4df1-b245-6362ec87d941","Type":"ContainerStarted","Data":"5bbd5804712afa7ece25d22c1ff5c36bd1ca8aff37ad18c51085255684a1733a"} Dec 01 19:22:43 crc kubenswrapper[4935]: I1201 19:22:43.779847 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnh28" event={"ID":"a951d082-52ba-4df1-b245-6362ec87d941","Type":"ContainerStarted","Data":"b8a4cf34e7d857107e588796501539c62e15593d5aad0d3619c3831fc7bcfdc1"} Dec 01 19:22:44 crc kubenswrapper[4935]: I1201 19:22:44.799929 4935 generic.go:334] "Generic (PLEG): container finished" podID="a951d082-52ba-4df1-b245-6362ec87d941" containerID="b8a4cf34e7d857107e588796501539c62e15593d5aad0d3619c3831fc7bcfdc1" exitCode=0 Dec 01 19:22:44 crc kubenswrapper[4935]: I1201 19:22:44.800058 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnh28" event={"ID":"a951d082-52ba-4df1-b245-6362ec87d941","Type":"ContainerDied","Data":"b8a4cf34e7d857107e588796501539c62e15593d5aad0d3619c3831fc7bcfdc1"} Dec 01 19:22:46 crc kubenswrapper[4935]: I1201 19:22:46.823872 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnh28" event={"ID":"a951d082-52ba-4df1-b245-6362ec87d941","Type":"ContainerStarted","Data":"74ebd85d9ccade4f20634bab0a0f4cba7356d2ed508f38ce5274b44330e89bcf"} Dec 01 19:22:46 crc kubenswrapper[4935]: I1201 19:22:46.854793 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qnh28" podStartSLOduration=3.986021785 podStartE2EDuration="6.854773382s" podCreationTimestamp="2025-12-01 19:22:40 +0000 UTC" firstStartedPulling="2025-12-01 19:22:42.764049768 +0000 UTC m=+3176.785679037" lastFinishedPulling="2025-12-01 19:22:45.632801375 +0000 UTC m=+3179.654430634" observedRunningTime="2025-12-01 19:22:46.84619994 +0000 UTC m=+3180.867829219" watchObservedRunningTime="2025-12-01 19:22:46.854773382 +0000 UTC m=+3180.876402631" Dec 01 19:22:51 crc kubenswrapper[4935]: I1201 19:22:51.285900 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:51 crc kubenswrapper[4935]: I1201 19:22:51.286363 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:51 crc kubenswrapper[4935]: I1201 19:22:51.362951 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:52 crc kubenswrapper[4935]: I1201 19:22:52.338003 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:52 crc kubenswrapper[4935]: I1201 19:22:52.393182 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnh28"] Dec 01 19:22:54 crc kubenswrapper[4935]: I1201 19:22:54.302020 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qnh28" podUID="a951d082-52ba-4df1-b245-6362ec87d941" containerName="registry-server" containerID="cri-o://74ebd85d9ccade4f20634bab0a0f4cba7356d2ed508f38ce5274b44330e89bcf" gracePeriod=2 Dec 01 19:22:54 crc kubenswrapper[4935]: I1201 19:22:54.508426 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:22:54 crc kubenswrapper[4935]: E1201 19:22:54.508745 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.316028 4935 generic.go:334] "Generic (PLEG): container finished" podID="a951d082-52ba-4df1-b245-6362ec87d941" containerID="74ebd85d9ccade4f20634bab0a0f4cba7356d2ed508f38ce5274b44330e89bcf" exitCode=0 Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.316371 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnh28" event={"ID":"a951d082-52ba-4df1-b245-6362ec87d941","Type":"ContainerDied","Data":"74ebd85d9ccade4f20634bab0a0f4cba7356d2ed508f38ce5274b44330e89bcf"} Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.316398 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnh28" event={"ID":"a951d082-52ba-4df1-b245-6362ec87d941","Type":"ContainerDied","Data":"5bbd5804712afa7ece25d22c1ff5c36bd1ca8aff37ad18c51085255684a1733a"} Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.316409 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bbd5804712afa7ece25d22c1ff5c36bd1ca8aff37ad18c51085255684a1733a" Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.331058 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.424437 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92zcd\" (UniqueName: \"kubernetes.io/projected/a951d082-52ba-4df1-b245-6362ec87d941-kube-api-access-92zcd\") pod \"a951d082-52ba-4df1-b245-6362ec87d941\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.424705 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-utilities\") pod \"a951d082-52ba-4df1-b245-6362ec87d941\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.424909 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-catalog-content\") pod \"a951d082-52ba-4df1-b245-6362ec87d941\" (UID: \"a951d082-52ba-4df1-b245-6362ec87d941\") " Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.430957 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a951d082-52ba-4df1-b245-6362ec87d941-kube-api-access-92zcd" (OuterVolumeSpecName: "kube-api-access-92zcd") pod "a951d082-52ba-4df1-b245-6362ec87d941" (UID: "a951d082-52ba-4df1-b245-6362ec87d941"). InnerVolumeSpecName "kube-api-access-92zcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.433098 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-utilities" (OuterVolumeSpecName: "utilities") pod "a951d082-52ba-4df1-b245-6362ec87d941" (UID: "a951d082-52ba-4df1-b245-6362ec87d941"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.477070 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a951d082-52ba-4df1-b245-6362ec87d941" (UID: "a951d082-52ba-4df1-b245-6362ec87d941"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.527474 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.527513 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92zcd\" (UniqueName: \"kubernetes.io/projected/a951d082-52ba-4df1-b245-6362ec87d941-kube-api-access-92zcd\") on node \"crc\" DevicePath \"\"" Dec 01 19:22:55 crc kubenswrapper[4935]: I1201 19:22:55.527528 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a951d082-52ba-4df1-b245-6362ec87d941-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:22:56 crc kubenswrapper[4935]: I1201 19:22:56.327820 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qnh28" Dec 01 19:22:56 crc kubenswrapper[4935]: I1201 19:22:56.390018 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnh28"] Dec 01 19:22:56 crc kubenswrapper[4935]: I1201 19:22:56.405669 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qnh28"] Dec 01 19:22:56 crc kubenswrapper[4935]: I1201 19:22:56.523636 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a951d082-52ba-4df1-b245-6362ec87d941" path="/var/lib/kubelet/pods/a951d082-52ba-4df1-b245-6362ec87d941/volumes" Dec 01 19:23:09 crc kubenswrapper[4935]: I1201 19:23:09.508268 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:23:09 crc kubenswrapper[4935]: E1201 19:23:09.510825 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:23:10 crc kubenswrapper[4935]: I1201 19:23:10.507615 4935 generic.go:334] "Generic (PLEG): container finished" podID="bc25f29a-826c-4823-95c8-1bba009e771f" containerID="56cf511702247978997a2ad0a68bb1e480f4ebffada88d7f4b9143998936975d" exitCode=0 Dec 01 19:23:10 crc kubenswrapper[4935]: I1201 19:23:10.529387 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" event={"ID":"bc25f29a-826c-4823-95c8-1bba009e771f","Type":"ContainerDied","Data":"56cf511702247978997a2ad0a68bb1e480f4ebffada88d7f4b9143998936975d"} Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.014392 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.086780 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-2\") pod \"bc25f29a-826c-4823-95c8-1bba009e771f\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.087011 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-0\") pod \"bc25f29a-826c-4823-95c8-1bba009e771f\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.087089 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ssh-key\") pod \"bc25f29a-826c-4823-95c8-1bba009e771f\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.087190 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-inventory\") pod \"bc25f29a-826c-4823-95c8-1bba009e771f\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.087340 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-telemetry-combined-ca-bundle\") pod \"bc25f29a-826c-4823-95c8-1bba009e771f\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.087417 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-1\") pod \"bc25f29a-826c-4823-95c8-1bba009e771f\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.087509 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxl72\" (UniqueName: \"kubernetes.io/projected/bc25f29a-826c-4823-95c8-1bba009e771f-kube-api-access-jxl72\") pod \"bc25f29a-826c-4823-95c8-1bba009e771f\" (UID: \"bc25f29a-826c-4823-95c8-1bba009e771f\") " Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.093049 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc25f29a-826c-4823-95c8-1bba009e771f-kube-api-access-jxl72" (OuterVolumeSpecName: "kube-api-access-jxl72") pod "bc25f29a-826c-4823-95c8-1bba009e771f" (UID: "bc25f29a-826c-4823-95c8-1bba009e771f"). InnerVolumeSpecName "kube-api-access-jxl72". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.093834 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "bc25f29a-826c-4823-95c8-1bba009e771f" (UID: "bc25f29a-826c-4823-95c8-1bba009e771f"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.129210 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bc25f29a-826c-4823-95c8-1bba009e771f" (UID: "bc25f29a-826c-4823-95c8-1bba009e771f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.131982 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-inventory" (OuterVolumeSpecName: "inventory") pod "bc25f29a-826c-4823-95c8-1bba009e771f" (UID: "bc25f29a-826c-4823-95c8-1bba009e771f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.132022 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "bc25f29a-826c-4823-95c8-1bba009e771f" (UID: "bc25f29a-826c-4823-95c8-1bba009e771f"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.143764 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "bc25f29a-826c-4823-95c8-1bba009e771f" (UID: "bc25f29a-826c-4823-95c8-1bba009e771f"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.158859 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "bc25f29a-826c-4823-95c8-1bba009e771f" (UID: "bc25f29a-826c-4823-95c8-1bba009e771f"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.191111 4935 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.191215 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.191231 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.191245 4935 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.191261 4935 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.191270 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxl72\" (UniqueName: \"kubernetes.io/projected/bc25f29a-826c-4823-95c8-1bba009e771f-kube-api-access-jxl72\") on node \"crc\" DevicePath \"\"" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.191281 4935 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bc25f29a-826c-4823-95c8-1bba009e771f-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.534098 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" event={"ID":"bc25f29a-826c-4823-95c8-1bba009e771f","Type":"ContainerDied","Data":"ee3d8b206d7a9a222657b29274f3dc4e92f0732aa4c64ac4e3c75b1cb1613397"} Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.534407 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee3d8b206d7a9a222657b29274f3dc4e92f0732aa4c64ac4e3c75b1cb1613397" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.534214 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.680794 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62"] Dec 01 19:23:12 crc kubenswrapper[4935]: E1201 19:23:12.681293 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a951d082-52ba-4df1-b245-6362ec87d941" containerName="registry-server" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.681311 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a951d082-52ba-4df1-b245-6362ec87d941" containerName="registry-server" Dec 01 19:23:12 crc kubenswrapper[4935]: E1201 19:23:12.681326 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc25f29a-826c-4823-95c8-1bba009e771f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.681333 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc25f29a-826c-4823-95c8-1bba009e771f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 19:23:12 crc kubenswrapper[4935]: E1201 19:23:12.681357 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a951d082-52ba-4df1-b245-6362ec87d941" containerName="extract-utilities" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.681363 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a951d082-52ba-4df1-b245-6362ec87d941" containerName="extract-utilities" Dec 01 19:23:12 crc kubenswrapper[4935]: E1201 19:23:12.681384 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a951d082-52ba-4df1-b245-6362ec87d941" containerName="extract-content" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.681389 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="a951d082-52ba-4df1-b245-6362ec87d941" containerName="extract-content" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.681622 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="a951d082-52ba-4df1-b245-6362ec87d941" containerName="registry-server" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.681634 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc25f29a-826c-4823-95c8-1bba009e771f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.682426 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.688521 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.688809 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.689344 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-ipmi-config-data" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.689364 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.694627 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.714960 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62"] Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.819459 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zllvs\" (UniqueName: \"kubernetes.io/projected/008102c1-54c3-4a58-8fec-a021793e2839-kube-api-access-zllvs\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.819544 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-telemetry-power-monitoring-combined-ca-bundle\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.819574 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-2\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.819645 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-1\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.819667 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ssh-key\") pod 
\"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.819746 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-inventory\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.819783 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-0\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.922771 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-1\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.923206 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ssh-key\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.923379 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-inventory\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.923420 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-0\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.923496 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zllvs\" (UniqueName: \"kubernetes.io/projected/008102c1-54c3-4a58-8fec-a021793e2839-kube-api-access-zllvs\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 
19:23:12.923584 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-telemetry-power-monitoring-combined-ca-bundle\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.923622 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-2\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.926774 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-1\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.926861 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-telemetry-power-monitoring-combined-ca-bundle\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.927325 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-inventory\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.927706 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-2\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.927903 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ssh-key\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.929050 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: 
\"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-0\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:12 crc kubenswrapper[4935]: I1201 19:23:12.941627 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zllvs\" (UniqueName: \"kubernetes.io/projected/008102c1-54c3-4a58-8fec-a021793e2839-kube-api-access-zllvs\") pod \"telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:13 crc kubenswrapper[4935]: I1201 19:23:13.004752 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:23:13 crc kubenswrapper[4935]: I1201 19:23:13.571611 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62"] Dec 01 19:23:13 crc kubenswrapper[4935]: W1201 19:23:13.574024 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod008102c1_54c3_4a58_8fec_a021793e2839.slice/crio-29febd16c49555366827677c1f9646d94e33e0296c9a4f3d41c1fcd5c23ea83e WatchSource:0}: Error finding container 29febd16c49555366827677c1f9646d94e33e0296c9a4f3d41c1fcd5c23ea83e: Status 404 returned error can't find the container with id 29febd16c49555366827677c1f9646d94e33e0296c9a4f3d41c1fcd5c23ea83e Dec 01 19:23:14 crc kubenswrapper[4935]: I1201 19:23:14.563598 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" event={"ID":"008102c1-54c3-4a58-8fec-a021793e2839","Type":"ContainerStarted","Data":"29febd16c49555366827677c1f9646d94e33e0296c9a4f3d41c1fcd5c23ea83e"} Dec 01 19:23:15 crc kubenswrapper[4935]: I1201 19:23:15.581013 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" event={"ID":"008102c1-54c3-4a58-8fec-a021793e2839","Type":"ContainerStarted","Data":"5fa44909de2d8292559c449cb49ee55fd353580782ab2db3d052552b04b79b16"} Dec 01 19:23:15 crc kubenswrapper[4935]: I1201 19:23:15.621929 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" podStartSLOduration=2.659781893 podStartE2EDuration="3.621907719s" podCreationTimestamp="2025-12-01 19:23:12 +0000 UTC" firstStartedPulling="2025-12-01 19:23:13.577667557 +0000 UTC m=+3207.599296836" lastFinishedPulling="2025-12-01 19:23:14.539793403 +0000 UTC m=+3208.561422662" observedRunningTime="2025-12-01 19:23:15.603686592 +0000 UTC m=+3209.625315911" watchObservedRunningTime="2025-12-01 19:23:15.621907719 +0000 UTC m=+3209.643536978" Dec 01 19:23:22 crc kubenswrapper[4935]: I1201 19:23:22.509205 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:23:22 crc kubenswrapper[4935]: E1201 19:23:22.510361 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:23:35 crc kubenswrapper[4935]: I1201 19:23:35.508715 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:23:35 crc kubenswrapper[4935]: E1201 19:23:35.509610 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:23:48 crc kubenswrapper[4935]: I1201 19:23:48.509426 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:23:48 crc kubenswrapper[4935]: E1201 19:23:48.510332 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:24:00 crc kubenswrapper[4935]: I1201 19:24:00.508101 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:24:01 crc kubenswrapper[4935]: I1201 19:24:01.235046 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"47200a92c9791b4df1cc46340078b999422d5ea623d96f2ed60b96d61034521a"} Dec 01 19:25:23 crc kubenswrapper[4935]: I1201 19:25:23.331339 4935 generic.go:334] "Generic (PLEG): container finished" podID="008102c1-54c3-4a58-8fec-a021793e2839" containerID="5fa44909de2d8292559c449cb49ee55fd353580782ab2db3d052552b04b79b16" exitCode=0 Dec 01 19:25:23 crc kubenswrapper[4935]: I1201 19:25:23.331479 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" event={"ID":"008102c1-54c3-4a58-8fec-a021793e2839","Type":"ContainerDied","Data":"5fa44909de2d8292559c449cb49ee55fd353580782ab2db3d052552b04b79b16"} Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.916530 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.943394 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-telemetry-power-monitoring-combined-ca-bundle\") pod \"008102c1-54c3-4a58-8fec-a021793e2839\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.943576 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-inventory\") pod \"008102c1-54c3-4a58-8fec-a021793e2839\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.943653 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ssh-key\") pod \"008102c1-54c3-4a58-8fec-a021793e2839\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.943846 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-1\") pod \"008102c1-54c3-4a58-8fec-a021793e2839\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.943875 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-2\") pod \"008102c1-54c3-4a58-8fec-a021793e2839\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.943932 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zllvs\" (UniqueName: \"kubernetes.io/projected/008102c1-54c3-4a58-8fec-a021793e2839-kube-api-access-zllvs\") pod \"008102c1-54c3-4a58-8fec-a021793e2839\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.943982 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-0\") pod \"008102c1-54c3-4a58-8fec-a021793e2839\" (UID: \"008102c1-54c3-4a58-8fec-a021793e2839\") " Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.951378 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-telemetry-power-monitoring-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-power-monitoring-combined-ca-bundle") pod "008102c1-54c3-4a58-8fec-a021793e2839" (UID: "008102c1-54c3-4a58-8fec-a021793e2839"). InnerVolumeSpecName "telemetry-power-monitoring-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.959857 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/008102c1-54c3-4a58-8fec-a021793e2839-kube-api-access-zllvs" (OuterVolumeSpecName: "kube-api-access-zllvs") pod "008102c1-54c3-4a58-8fec-a021793e2839" (UID: "008102c1-54c3-4a58-8fec-a021793e2839"). InnerVolumeSpecName "kube-api-access-zllvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:25:24 crc kubenswrapper[4935]: I1201 19:25:24.987644 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-inventory" (OuterVolumeSpecName: "inventory") pod "008102c1-54c3-4a58-8fec-a021793e2839" (UID: "008102c1-54c3-4a58-8fec-a021793e2839"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.014005 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-1" (OuterVolumeSpecName: "ceilometer-ipmi-config-data-1") pod "008102c1-54c3-4a58-8fec-a021793e2839" (UID: "008102c1-54c3-4a58-8fec-a021793e2839"). InnerVolumeSpecName "ceilometer-ipmi-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.019306 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-0" (OuterVolumeSpecName: "ceilometer-ipmi-config-data-0") pod "008102c1-54c3-4a58-8fec-a021793e2839" (UID: "008102c1-54c3-4a58-8fec-a021793e2839"). InnerVolumeSpecName "ceilometer-ipmi-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.030641 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-2" (OuterVolumeSpecName: "ceilometer-ipmi-config-data-2") pod "008102c1-54c3-4a58-8fec-a021793e2839" (UID: "008102c1-54c3-4a58-8fec-a021793e2839"). InnerVolumeSpecName "ceilometer-ipmi-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.046743 4935 reconciler_common.go:293] "Volume detached for volume \"ceilometer-ipmi-config-data-1\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.046784 4935 reconciler_common.go:293] "Volume detached for volume \"ceilometer-ipmi-config-data-2\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.046796 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zllvs\" (UniqueName: \"kubernetes.io/projected/008102c1-54c3-4a58-8fec-a021793e2839-kube-api-access-zllvs\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.046809 4935 reconciler_common.go:293] "Volume detached for volume \"ceilometer-ipmi-config-data-0\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ceilometer-ipmi-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.046823 4935 reconciler_common.go:293] "Volume detached for volume \"telemetry-power-monitoring-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-telemetry-power-monitoring-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.046840 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.052513 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "008102c1-54c3-4a58-8fec-a021793e2839" (UID: "008102c1-54c3-4a58-8fec-a021793e2839"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.150908 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/008102c1-54c3-4a58-8fec-a021793e2839-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.353812 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" event={"ID":"008102c1-54c3-4a58-8fec-a021793e2839","Type":"ContainerDied","Data":"29febd16c49555366827677c1f9646d94e33e0296c9a4f3d41c1fcd5c23ea83e"} Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.353852 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29febd16c49555366827677c1f9646d94e33e0296c9a4f3d41c1fcd5c23ea83e" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.353876 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.486254 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp"] Dec 01 19:25:25 crc kubenswrapper[4935]: E1201 19:25:25.487372 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="008102c1-54c3-4a58-8fec-a021793e2839" containerName="telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.487401 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="008102c1-54c3-4a58-8fec-a021793e2839" containerName="telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.487764 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="008102c1-54c3-4a58-8fec-a021793e2839" containerName="telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.488795 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.495785 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"logging-compute-config-data" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.496138 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.496298 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2dh5" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.496351 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.496610 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.508663 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp"] Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.561349 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jg9v\" (UniqueName: \"kubernetes.io/projected/bb830086-fa18-46e6-877c-ab2bfbaea88b-kube-api-access-9jg9v\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.561754 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-1\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.562035 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-inventory\") pod 
\"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.562104 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-ssh-key\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.562507 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-0\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.665129 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jg9v\" (UniqueName: \"kubernetes.io/projected/bb830086-fa18-46e6-877c-ab2bfbaea88b-kube-api-access-9jg9v\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.665303 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-1\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.665372 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-inventory\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.665405 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-ssh-key\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.665521 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-0\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.670046 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-inventory\") pod 
\"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.670073 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-ssh-key\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.671123 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-1\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.677747 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-0\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.688936 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jg9v\" (UniqueName: \"kubernetes.io/projected/bb830086-fa18-46e6-877c-ab2bfbaea88b-kube-api-access-9jg9v\") pod \"logging-edpm-deployment-openstack-edpm-ipam-wh9xp\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.829621 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zdpjw"] Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.835452 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.846961 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.848834 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zdpjw"] Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.871239 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-catalog-content\") pod \"redhat-marketplace-zdpjw\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.871273 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzv5k\" (UniqueName: \"kubernetes.io/projected/ab966cca-8081-4f25-9efa-586c537ec3be-kube-api-access-xzv5k\") pod \"redhat-marketplace-zdpjw\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.871332 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-utilities\") pod \"redhat-marketplace-zdpjw\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.978142 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-catalog-content\") pod \"redhat-marketplace-zdpjw\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.978426 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzv5k\" (UniqueName: \"kubernetes.io/projected/ab966cca-8081-4f25-9efa-586c537ec3be-kube-api-access-xzv5k\") pod \"redhat-marketplace-zdpjw\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.978485 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-utilities\") pod \"redhat-marketplace-zdpjw\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.978910 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-catalog-content\") pod \"redhat-marketplace-zdpjw\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:25 crc kubenswrapper[4935]: I1201 19:25:25.978918 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-utilities\") pod \"redhat-marketplace-zdpjw\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.054242 4935 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-xzv5k\" (UniqueName: \"kubernetes.io/projected/ab966cca-8081-4f25-9efa-586c537ec3be-kube-api-access-xzv5k\") pod \"redhat-marketplace-zdpjw\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.091555 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hpm55"] Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.094911 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.148506 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hpm55"] Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.192496 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b89p\" (UniqueName: \"kubernetes.io/projected/50067650-c466-473e-b59d-b0ca914e1cd1-kube-api-access-6b89p\") pod \"redhat-operators-hpm55\" (UID: \"50067650-c466-473e-b59d-b0ca914e1cd1\") " pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.192691 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50067650-c466-473e-b59d-b0ca914e1cd1-catalog-content\") pod \"redhat-operators-hpm55\" (UID: \"50067650-c466-473e-b59d-b0ca914e1cd1\") " pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.192789 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50067650-c466-473e-b59d-b0ca914e1cd1-utilities\") pod \"redhat-operators-hpm55\" (UID: \"50067650-c466-473e-b59d-b0ca914e1cd1\") " pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.294597 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b89p\" (UniqueName: \"kubernetes.io/projected/50067650-c466-473e-b59d-b0ca914e1cd1-kube-api-access-6b89p\") pod \"redhat-operators-hpm55\" (UID: \"50067650-c466-473e-b59d-b0ca914e1cd1\") " pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.294726 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50067650-c466-473e-b59d-b0ca914e1cd1-catalog-content\") pod \"redhat-operators-hpm55\" (UID: \"50067650-c466-473e-b59d-b0ca914e1cd1\") " pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.294787 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50067650-c466-473e-b59d-b0ca914e1cd1-utilities\") pod \"redhat-operators-hpm55\" (UID: \"50067650-c466-473e-b59d-b0ca914e1cd1\") " pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.295386 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50067650-c466-473e-b59d-b0ca914e1cd1-utilities\") pod \"redhat-operators-hpm55\" (UID: \"50067650-c466-473e-b59d-b0ca914e1cd1\") " 
pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.295468 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50067650-c466-473e-b59d-b0ca914e1cd1-catalog-content\") pod \"redhat-operators-hpm55\" (UID: \"50067650-c466-473e-b59d-b0ca914e1cd1\") " pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.315403 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b89p\" (UniqueName: \"kubernetes.io/projected/50067650-c466-473e-b59d-b0ca914e1cd1-kube-api-access-6b89p\") pod \"redhat-operators-hpm55\" (UID: \"50067650-c466-473e-b59d-b0ca914e1cd1\") " pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.325219 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.432821 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.531892 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp"] Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.850961 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zdpjw"] Dec 01 19:25:26 crc kubenswrapper[4935]: W1201 19:25:26.853582 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab966cca_8081_4f25_9efa_586c537ec3be.slice/crio-62839b9dbd3f080a5eb3fc65f7a78152b7d837739110d9f7606c643e241c8704 WatchSource:0}: Error finding container 62839b9dbd3f080a5eb3fc65f7a78152b7d837739110d9f7606c643e241c8704: Status 404 returned error can't find the container with id 62839b9dbd3f080a5eb3fc65f7a78152b7d837739110d9f7606c643e241c8704 Dec 01 19:25:26 crc kubenswrapper[4935]: I1201 19:25:26.969741 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hpm55"] Dec 01 19:25:27 crc kubenswrapper[4935]: I1201 19:25:27.385448 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" event={"ID":"bb830086-fa18-46e6-877c-ab2bfbaea88b","Type":"ContainerStarted","Data":"4cfe492ccc613d76287738f95632d52ae5bf2abb0cff16ab0a3a0ce811862dc8"} Dec 01 19:25:27 crc kubenswrapper[4935]: I1201 19:25:27.385714 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" event={"ID":"bb830086-fa18-46e6-877c-ab2bfbaea88b","Type":"ContainerStarted","Data":"481f736baefbe53349473c3270621963637cb0edf5c2806a55f67734aa34b0d3"} Dec 01 19:25:27 crc kubenswrapper[4935]: I1201 19:25:27.388383 4935 generic.go:334] "Generic (PLEG): container finished" podID="50067650-c466-473e-b59d-b0ca914e1cd1" containerID="75f97159a78f0944b526bd0cd1f9e149fd1732152d65344e00cc19ce1a0a7707" exitCode=0 Dec 01 19:25:27 crc kubenswrapper[4935]: I1201 19:25:27.388454 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpm55" event={"ID":"50067650-c466-473e-b59d-b0ca914e1cd1","Type":"ContainerDied","Data":"75f97159a78f0944b526bd0cd1f9e149fd1732152d65344e00cc19ce1a0a7707"} Dec 01 19:25:27 crc 
kubenswrapper[4935]: I1201 19:25:27.388520 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpm55" event={"ID":"50067650-c466-473e-b59d-b0ca914e1cd1","Type":"ContainerStarted","Data":"39a13f66286c94c73de4a9c04636b28ac0cb85f1eb2d48de256994b5e89f498e"} Dec 01 19:25:27 crc kubenswrapper[4935]: I1201 19:25:27.390712 4935 generic.go:334] "Generic (PLEG): container finished" podID="ab966cca-8081-4f25-9efa-586c537ec3be" containerID="502de78f42a47ad262ac8b54e6d88b137458ca3e47f00bba70d73db8f1f48dc0" exitCode=0 Dec 01 19:25:27 crc kubenswrapper[4935]: I1201 19:25:27.390767 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zdpjw" event={"ID":"ab966cca-8081-4f25-9efa-586c537ec3be","Type":"ContainerDied","Data":"502de78f42a47ad262ac8b54e6d88b137458ca3e47f00bba70d73db8f1f48dc0"} Dec 01 19:25:27 crc kubenswrapper[4935]: I1201 19:25:27.390796 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zdpjw" event={"ID":"ab966cca-8081-4f25-9efa-586c537ec3be","Type":"ContainerStarted","Data":"62839b9dbd3f080a5eb3fc65f7a78152b7d837739110d9f7606c643e241c8704"} Dec 01 19:25:27 crc kubenswrapper[4935]: I1201 19:25:27.471300 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" podStartSLOduration=1.953170873 podStartE2EDuration="2.471281174s" podCreationTimestamp="2025-12-01 19:25:25 +0000 UTC" firstStartedPulling="2025-12-01 19:25:26.545340655 +0000 UTC m=+3340.566969914" lastFinishedPulling="2025-12-01 19:25:27.063450956 +0000 UTC m=+3341.085080215" observedRunningTime="2025-12-01 19:25:27.449399651 +0000 UTC m=+3341.471028910" watchObservedRunningTime="2025-12-01 19:25:27.471281174 +0000 UTC m=+3341.492910433" Dec 01 19:25:29 crc kubenswrapper[4935]: I1201 19:25:29.419466 4935 generic.go:334] "Generic (PLEG): container finished" podID="ab966cca-8081-4f25-9efa-586c537ec3be" containerID="5646c67296607060a861b8914dccc901f884ad3e172ae9e59c516d4f72175deb" exitCode=0 Dec 01 19:25:29 crc kubenswrapper[4935]: I1201 19:25:29.420214 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zdpjw" event={"ID":"ab966cca-8081-4f25-9efa-586c537ec3be","Type":"ContainerDied","Data":"5646c67296607060a861b8914dccc901f884ad3e172ae9e59c516d4f72175deb"} Dec 01 19:25:30 crc kubenswrapper[4935]: I1201 19:25:30.442799 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zdpjw" event={"ID":"ab966cca-8081-4f25-9efa-586c537ec3be","Type":"ContainerStarted","Data":"1775a766f5add15511ef31ccc5e1da6cbbc2075838938c7d2a9ef44447cf00cb"} Dec 01 19:25:30 crc kubenswrapper[4935]: I1201 19:25:30.469395 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zdpjw" podStartSLOduration=3.011671571 podStartE2EDuration="5.469372228s" podCreationTimestamp="2025-12-01 19:25:25 +0000 UTC" firstStartedPulling="2025-12-01 19:25:27.393015815 +0000 UTC m=+3341.414645074" lastFinishedPulling="2025-12-01 19:25:29.850716472 +0000 UTC m=+3343.872345731" observedRunningTime="2025-12-01 19:25:30.457954517 +0000 UTC m=+3344.479583776" watchObservedRunningTime="2025-12-01 19:25:30.469372228 +0000 UTC m=+3344.491001487" Dec 01 19:25:36 crc kubenswrapper[4935]: I1201 19:25:36.325552 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:36 crc kubenswrapper[4935]: I1201 19:25:36.326066 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:36 crc kubenswrapper[4935]: I1201 19:25:36.374048 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:36 crc kubenswrapper[4935]: I1201 19:25:36.599629 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:36 crc kubenswrapper[4935]: I1201 19:25:36.675663 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zdpjw"] Dec 01 19:25:37 crc kubenswrapper[4935]: I1201 19:25:37.566183 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpm55" event={"ID":"50067650-c466-473e-b59d-b0ca914e1cd1","Type":"ContainerStarted","Data":"23afd618aa896c479ee0102d17cd8103387db76a2c949ccee288ef896675b755"} Dec 01 19:25:38 crc kubenswrapper[4935]: I1201 19:25:38.579711 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zdpjw" podUID="ab966cca-8081-4f25-9efa-586c537ec3be" containerName="registry-server" containerID="cri-o://1775a766f5add15511ef31ccc5e1da6cbbc2075838938c7d2a9ef44447cf00cb" gracePeriod=2 Dec 01 19:25:40 crc kubenswrapper[4935]: I1201 19:25:40.606096 4935 generic.go:334] "Generic (PLEG): container finished" podID="50067650-c466-473e-b59d-b0ca914e1cd1" containerID="23afd618aa896c479ee0102d17cd8103387db76a2c949ccee288ef896675b755" exitCode=0 Dec 01 19:25:40 crc kubenswrapper[4935]: I1201 19:25:40.606184 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpm55" event={"ID":"50067650-c466-473e-b59d-b0ca914e1cd1","Type":"ContainerDied","Data":"23afd618aa896c479ee0102d17cd8103387db76a2c949ccee288ef896675b755"} Dec 01 19:25:41 crc kubenswrapper[4935]: I1201 19:25:41.626903 4935 generic.go:334] "Generic (PLEG): container finished" podID="ab966cca-8081-4f25-9efa-586c537ec3be" containerID="1775a766f5add15511ef31ccc5e1da6cbbc2075838938c7d2a9ef44447cf00cb" exitCode=0 Dec 01 19:25:41 crc kubenswrapper[4935]: I1201 19:25:41.627183 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zdpjw" event={"ID":"ab966cca-8081-4f25-9efa-586c537ec3be","Type":"ContainerDied","Data":"1775a766f5add15511ef31ccc5e1da6cbbc2075838938c7d2a9ef44447cf00cb"} Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.074715 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.216870 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzv5k\" (UniqueName: \"kubernetes.io/projected/ab966cca-8081-4f25-9efa-586c537ec3be-kube-api-access-xzv5k\") pod \"ab966cca-8081-4f25-9efa-586c537ec3be\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.217065 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-catalog-content\") pod \"ab966cca-8081-4f25-9efa-586c537ec3be\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.217204 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-utilities\") pod \"ab966cca-8081-4f25-9efa-586c537ec3be\" (UID: \"ab966cca-8081-4f25-9efa-586c537ec3be\") " Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.218441 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-utilities" (OuterVolumeSpecName: "utilities") pod "ab966cca-8081-4f25-9efa-586c537ec3be" (UID: "ab966cca-8081-4f25-9efa-586c537ec3be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.227362 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab966cca-8081-4f25-9efa-586c537ec3be-kube-api-access-xzv5k" (OuterVolumeSpecName: "kube-api-access-xzv5k") pod "ab966cca-8081-4f25-9efa-586c537ec3be" (UID: "ab966cca-8081-4f25-9efa-586c537ec3be"). InnerVolumeSpecName "kube-api-access-xzv5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.246831 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab966cca-8081-4f25-9efa-586c537ec3be" (UID: "ab966cca-8081-4f25-9efa-586c537ec3be"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.320091 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.320146 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzv5k\" (UniqueName: \"kubernetes.io/projected/ab966cca-8081-4f25-9efa-586c537ec3be-kube-api-access-xzv5k\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.320189 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab966cca-8081-4f25-9efa-586c537ec3be-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.656214 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpm55" event={"ID":"50067650-c466-473e-b59d-b0ca914e1cd1","Type":"ContainerStarted","Data":"12ccf835731d53d0d7801c1c376dbc213215dde330025e26787545e15dc0fcc7"} Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.661864 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zdpjw" event={"ID":"ab966cca-8081-4f25-9efa-586c537ec3be","Type":"ContainerDied","Data":"62839b9dbd3f080a5eb3fc65f7a78152b7d837739110d9f7606c643e241c8704"} Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.661973 4935 scope.go:117] "RemoveContainer" containerID="1775a766f5add15511ef31ccc5e1da6cbbc2075838938c7d2a9ef44447cf00cb" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.662133 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zdpjw" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.682188 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hpm55" podStartSLOduration=2.377712785 podStartE2EDuration="16.682146167s" podCreationTimestamp="2025-12-01 19:25:26 +0000 UTC" firstStartedPulling="2025-12-01 19:25:27.391551758 +0000 UTC m=+3341.413181017" lastFinishedPulling="2025-12-01 19:25:41.69598514 +0000 UTC m=+3355.717614399" observedRunningTime="2025-12-01 19:25:42.677440807 +0000 UTC m=+3356.699070076" watchObservedRunningTime="2025-12-01 19:25:42.682146167 +0000 UTC m=+3356.703775446" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.704745 4935 scope.go:117] "RemoveContainer" containerID="5646c67296607060a861b8914dccc901f884ad3e172ae9e59c516d4f72175deb" Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.715008 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zdpjw"] Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.724938 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zdpjw"] Dec 01 19:25:42 crc kubenswrapper[4935]: I1201 19:25:42.729041 4935 scope.go:117] "RemoveContainer" containerID="502de78f42a47ad262ac8b54e6d88b137458ca3e47f00bba70d73db8f1f48dc0" Dec 01 19:25:43 crc kubenswrapper[4935]: I1201 19:25:43.676328 4935 generic.go:334] "Generic (PLEG): container finished" podID="bb830086-fa18-46e6-877c-ab2bfbaea88b" containerID="4cfe492ccc613d76287738f95632d52ae5bf2abb0cff16ab0a3a0ce811862dc8" exitCode=0 Dec 01 19:25:43 crc kubenswrapper[4935]: I1201 19:25:43.676379 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" event={"ID":"bb830086-fa18-46e6-877c-ab2bfbaea88b","Type":"ContainerDied","Data":"4cfe492ccc613d76287738f95632d52ae5bf2abb0cff16ab0a3a0ce811862dc8"} Dec 01 19:25:44 crc kubenswrapper[4935]: I1201 19:25:44.524580 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab966cca-8081-4f25-9efa-586c537ec3be" path="/var/lib/kubelet/pods/ab966cca-8081-4f25-9efa-586c537ec3be/volumes" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.318909 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.387109 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-ssh-key\") pod \"bb830086-fa18-46e6-877c-ab2bfbaea88b\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.387407 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jg9v\" (UniqueName: \"kubernetes.io/projected/bb830086-fa18-46e6-877c-ab2bfbaea88b-kube-api-access-9jg9v\") pod \"bb830086-fa18-46e6-877c-ab2bfbaea88b\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.387483 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-1\") pod \"bb830086-fa18-46e6-877c-ab2bfbaea88b\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.387537 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-inventory\") pod \"bb830086-fa18-46e6-877c-ab2bfbaea88b\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.387584 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-0\") pod \"bb830086-fa18-46e6-877c-ab2bfbaea88b\" (UID: \"bb830086-fa18-46e6-877c-ab2bfbaea88b\") " Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.394409 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb830086-fa18-46e6-877c-ab2bfbaea88b-kube-api-access-9jg9v" (OuterVolumeSpecName: "kube-api-access-9jg9v") pod "bb830086-fa18-46e6-877c-ab2bfbaea88b" (UID: "bb830086-fa18-46e6-877c-ab2bfbaea88b"). InnerVolumeSpecName "kube-api-access-9jg9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.425621 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bb830086-fa18-46e6-877c-ab2bfbaea88b" (UID: "bb830086-fa18-46e6-877c-ab2bfbaea88b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.425995 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-inventory" (OuterVolumeSpecName: "inventory") pod "bb830086-fa18-46e6-877c-ab2bfbaea88b" (UID: "bb830086-fa18-46e6-877c-ab2bfbaea88b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.430988 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-0" (OuterVolumeSpecName: "logging-compute-config-data-0") pod "bb830086-fa18-46e6-877c-ab2bfbaea88b" (UID: "bb830086-fa18-46e6-877c-ab2bfbaea88b"). InnerVolumeSpecName "logging-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.448054 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-1" (OuterVolumeSpecName: "logging-compute-config-data-1") pod "bb830086-fa18-46e6-877c-ab2bfbaea88b" (UID: "bb830086-fa18-46e6-877c-ab2bfbaea88b"). InnerVolumeSpecName "logging-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.490265 4935 reconciler_common.go:293] "Volume detached for volume \"logging-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.490307 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.490331 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jg9v\" (UniqueName: \"kubernetes.io/projected/bb830086-fa18-46e6-877c-ab2bfbaea88b-kube-api-access-9jg9v\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.490345 4935 reconciler_common.go:293] "Volume detached for volume \"logging-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-logging-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.490357 4935 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb830086-fa18-46e6-877c-ab2bfbaea88b-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.698472 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" event={"ID":"bb830086-fa18-46e6-877c-ab2bfbaea88b","Type":"ContainerDied","Data":"481f736baefbe53349473c3270621963637cb0edf5c2806a55f67734aa34b0d3"} Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.698749 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="481f736baefbe53349473c3270621963637cb0edf5c2806a55f67734aa34b0d3" Dec 01 19:25:45 crc kubenswrapper[4935]: I1201 19:25:45.698563 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/logging-edpm-deployment-openstack-edpm-ipam-wh9xp" Dec 01 19:25:46 crc kubenswrapper[4935]: I1201 19:25:46.435610 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:46 crc kubenswrapper[4935]: I1201 19:25:46.436778 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:47 crc kubenswrapper[4935]: I1201 19:25:47.499598 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hpm55" podUID="50067650-c466-473e-b59d-b0ca914e1cd1" containerName="registry-server" probeResult="failure" output=< Dec 01 19:25:47 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 19:25:47 crc kubenswrapper[4935]: > Dec 01 19:25:56 crc kubenswrapper[4935]: I1201 19:25:56.483752 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:25:56 crc kubenswrapper[4935]: I1201 19:25:56.537549 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hpm55" Dec 01 19:26:01 crc kubenswrapper[4935]: I1201 19:26:01.792766 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hpm55"] Dec 01 19:26:02 crc kubenswrapper[4935]: I1201 19:26:02.567613 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m2wlg"] Dec 01 19:26:02 crc kubenswrapper[4935]: I1201 19:26:02.568205 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m2wlg" podUID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerName="registry-server" containerID="cri-o://ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511" gracePeriod=2 Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.233998 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.337536 4935 generic.go:334] "Generic (PLEG): container finished" podID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerID="ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511" exitCode=0 Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.337640 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m2wlg" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.337678 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2wlg" event={"ID":"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0","Type":"ContainerDied","Data":"ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511"} Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.337741 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2wlg" event={"ID":"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0","Type":"ContainerDied","Data":"75b050cc7291978bcd68ed84c5a0887c50b22b06f49c2130d9159c3543193b09"} Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.337758 4935 scope.go:117] "RemoveContainer" containerID="ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.358562 4935 scope.go:117] "RemoveContainer" containerID="770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.366785 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-catalog-content\") pod \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.366903 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-utilities\") pod \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.367113 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctpwp\" (UniqueName: \"kubernetes.io/projected/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-kube-api-access-ctpwp\") pod \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\" (UID: \"fecb7ac3-81ed-4051-8e39-fca4b8f7fae0\") " Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.370961 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-utilities" (OuterVolumeSpecName: "utilities") pod "fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" (UID: "fecb7ac3-81ed-4051-8e39-fca4b8f7fae0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.376169 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-kube-api-access-ctpwp" (OuterVolumeSpecName: "kube-api-access-ctpwp") pod "fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" (UID: "fecb7ac3-81ed-4051-8e39-fca4b8f7fae0"). InnerVolumeSpecName "kube-api-access-ctpwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.381751 4935 scope.go:117] "RemoveContainer" containerID="1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.469768 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctpwp\" (UniqueName: \"kubernetes.io/projected/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-kube-api-access-ctpwp\") on node \"crc\" DevicePath \"\"" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.469801 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.502118 4935 scope.go:117] "RemoveContainer" containerID="ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511" Dec 01 19:26:03 crc kubenswrapper[4935]: E1201 19:26:03.502850 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511\": container with ID starting with ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511 not found: ID does not exist" containerID="ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.503021 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511"} err="failed to get container status \"ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511\": rpc error: code = NotFound desc = could not find container \"ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511\": container with ID starting with ce7ca132af5359fe00fde65e74163833180045bd9769e12ff48e8c940997b511 not found: ID does not exist" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.503097 4935 scope.go:117] "RemoveContainer" containerID="770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf" Dec 01 19:26:03 crc kubenswrapper[4935]: E1201 19:26:03.503429 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf\": container with ID starting with 770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf not found: ID does not exist" containerID="770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.503753 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf"} err="failed to get container status \"770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf\": rpc error: code = NotFound desc = could not find container \"770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf\": container with ID starting with 770bfd82cf1ad496c31e78cf16061fea951ea28b36556b36ace8380ccb15d0bf not found: ID does not exist" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.503826 4935 scope.go:117] "RemoveContainer" containerID="1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1" Dec 01 19:26:03 crc kubenswrapper[4935]: E1201 19:26:03.504226 4935 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1\": container with ID starting with 1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1 not found: ID does not exist" containerID="1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.504268 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1"} err="failed to get container status \"1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1\": rpc error: code = NotFound desc = could not find container \"1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1\": container with ID starting with 1c4dfedcf2629930d510a4e83f2d05b36bb46122516c498b74319615ef810fe1 not found: ID does not exist" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.533260 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" (UID: "fecb7ac3-81ed-4051-8e39-fca4b8f7fae0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.572384 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.676455 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m2wlg"] Dec 01 19:26:03 crc kubenswrapper[4935]: I1201 19:26:03.686358 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m2wlg"] Dec 01 19:26:04 crc kubenswrapper[4935]: I1201 19:26:04.523548 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" path="/var/lib/kubelet/pods/fecb7ac3-81ed-4051-8e39-fca4b8f7fae0/volumes" Dec 01 19:26:24 crc kubenswrapper[4935]: I1201 19:26:24.346385 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:26:24 crc kubenswrapper[4935]: I1201 19:26:24.347363 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:26:54 crc kubenswrapper[4935]: I1201 19:26:54.345686 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:26:54 crc kubenswrapper[4935]: I1201 19:26:54.346186 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" 
podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:27:24 crc kubenswrapper[4935]: I1201 19:27:24.347079 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:27:24 crc kubenswrapper[4935]: I1201 19:27:24.347950 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:27:24 crc kubenswrapper[4935]: I1201 19:27:24.348027 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:27:24 crc kubenswrapper[4935]: I1201 19:27:24.349051 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"47200a92c9791b4df1cc46340078b999422d5ea623d96f2ed60b96d61034521a"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:27:24 crc kubenswrapper[4935]: I1201 19:27:24.349187 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://47200a92c9791b4df1cc46340078b999422d5ea623d96f2ed60b96d61034521a" gracePeriod=600 Dec 01 19:27:24 crc kubenswrapper[4935]: I1201 19:27:24.527909 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="47200a92c9791b4df1cc46340078b999422d5ea623d96f2ed60b96d61034521a" exitCode=0 Dec 01 19:27:24 crc kubenswrapper[4935]: I1201 19:27:24.528970 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"47200a92c9791b4df1cc46340078b999422d5ea623d96f2ed60b96d61034521a"} Dec 01 19:27:24 crc kubenswrapper[4935]: I1201 19:27:24.529061 4935 scope.go:117] "RemoveContainer" containerID="9366edd67c409f6fee22366408bb8e147cc84994daecd587705ba387b792afdf" Dec 01 19:27:25 crc kubenswrapper[4935]: I1201 19:27:25.550032 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416"} Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.041741 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b5bp2"] Dec 01 19:29:05 crc kubenswrapper[4935]: E1201 19:29:05.042956 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab966cca-8081-4f25-9efa-586c537ec3be" containerName="extract-utilities" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.042977 4935 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ab966cca-8081-4f25-9efa-586c537ec3be" containerName="extract-utilities" Dec 01 19:29:05 crc kubenswrapper[4935]: E1201 19:29:05.043015 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab966cca-8081-4f25-9efa-586c537ec3be" containerName="registry-server" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.043023 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab966cca-8081-4f25-9efa-586c537ec3be" containerName="registry-server" Dec 01 19:29:05 crc kubenswrapper[4935]: E1201 19:29:05.043046 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab966cca-8081-4f25-9efa-586c537ec3be" containerName="extract-content" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.043054 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab966cca-8081-4f25-9efa-586c537ec3be" containerName="extract-content" Dec 01 19:29:05 crc kubenswrapper[4935]: E1201 19:29:05.043083 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerName="extract-content" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.043091 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerName="extract-content" Dec 01 19:29:05 crc kubenswrapper[4935]: E1201 19:29:05.043110 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerName="extract-utilities" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.043117 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerName="extract-utilities" Dec 01 19:29:05 crc kubenswrapper[4935]: E1201 19:29:05.043133 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerName="registry-server" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.043163 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerName="registry-server" Dec 01 19:29:05 crc kubenswrapper[4935]: E1201 19:29:05.043188 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb830086-fa18-46e6-877c-ab2bfbaea88b" containerName="logging-edpm-deployment-openstack-edpm-ipam" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.043197 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb830086-fa18-46e6-877c-ab2bfbaea88b" containerName="logging-edpm-deployment-openstack-edpm-ipam" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.043503 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab966cca-8081-4f25-9efa-586c537ec3be" containerName="registry-server" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.043550 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb830086-fa18-46e6-877c-ab2bfbaea88b" containerName="logging-edpm-deployment-openstack-edpm-ipam" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.043567 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="fecb7ac3-81ed-4051-8e39-fca4b8f7fae0" containerName="registry-server" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.045860 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.065268 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b5bp2"] Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.163003 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-utilities\") pod \"community-operators-b5bp2\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.163123 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hqgk\" (UniqueName: \"kubernetes.io/projected/57c06919-d4ee-4399-9f05-b81eb5707f3f-kube-api-access-4hqgk\") pod \"community-operators-b5bp2\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.163273 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-catalog-content\") pod \"community-operators-b5bp2\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.264918 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hqgk\" (UniqueName: \"kubernetes.io/projected/57c06919-d4ee-4399-9f05-b81eb5707f3f-kube-api-access-4hqgk\") pod \"community-operators-b5bp2\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.265094 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-catalog-content\") pod \"community-operators-b5bp2\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.265127 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-utilities\") pod \"community-operators-b5bp2\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.265650 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-catalog-content\") pod \"community-operators-b5bp2\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.265680 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-utilities\") pod \"community-operators-b5bp2\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.286496 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4hqgk\" (UniqueName: \"kubernetes.io/projected/57c06919-d4ee-4399-9f05-b81eb5707f3f-kube-api-access-4hqgk\") pod \"community-operators-b5bp2\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.371040 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.913039 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b5bp2"] Dec 01 19:29:05 crc kubenswrapper[4935]: I1201 19:29:05.978613 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5bp2" event={"ID":"57c06919-d4ee-4399-9f05-b81eb5707f3f","Type":"ContainerStarted","Data":"1b584902cac10e687eadb815f1fdc115dd4752ec89c08a138dd52ac32af8a537"} Dec 01 19:29:06 crc kubenswrapper[4935]: I1201 19:29:06.991392 4935 generic.go:334] "Generic (PLEG): container finished" podID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerID="9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75" exitCode=0 Dec 01 19:29:06 crc kubenswrapper[4935]: I1201 19:29:06.991456 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5bp2" event={"ID":"57c06919-d4ee-4399-9f05-b81eb5707f3f","Type":"ContainerDied","Data":"9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75"} Dec 01 19:29:06 crc kubenswrapper[4935]: I1201 19:29:06.994678 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:29:09 crc kubenswrapper[4935]: I1201 19:29:09.041434 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5bp2" event={"ID":"57c06919-d4ee-4399-9f05-b81eb5707f3f","Type":"ContainerStarted","Data":"11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b"} Dec 01 19:29:10 crc kubenswrapper[4935]: I1201 19:29:10.056274 4935 generic.go:334] "Generic (PLEG): container finished" podID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerID="11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b" exitCode=0 Dec 01 19:29:10 crc kubenswrapper[4935]: I1201 19:29:10.056408 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5bp2" event={"ID":"57c06919-d4ee-4399-9f05-b81eb5707f3f","Type":"ContainerDied","Data":"11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b"} Dec 01 19:29:10 crc kubenswrapper[4935]: I1201 19:29:10.986579 4935 scope.go:117] "RemoveContainer" containerID="74ebd85d9ccade4f20634bab0a0f4cba7356d2ed508f38ce5274b44330e89bcf" Dec 01 19:29:11 crc kubenswrapper[4935]: I1201 19:29:11.018644 4935 scope.go:117] "RemoveContainer" containerID="ad4a8eb5b5155fb385015bd21a4944077d84f8a9c41b7cbd245e5fd5ee670af0" Dec 01 19:29:11 crc kubenswrapper[4935]: I1201 19:29:11.047652 4935 scope.go:117] "RemoveContainer" containerID="b8a4cf34e7d857107e588796501539c62e15593d5aad0d3619c3831fc7bcfdc1" Dec 01 19:29:11 crc kubenswrapper[4935]: I1201 19:29:11.075567 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5bp2" event={"ID":"57c06919-d4ee-4399-9f05-b81eb5707f3f","Type":"ContainerStarted","Data":"8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6"} Dec 01 19:29:11 crc kubenswrapper[4935]: I1201 
19:29:11.123286 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b5bp2" podStartSLOduration=2.4841959559999998 podStartE2EDuration="6.123264744s" podCreationTimestamp="2025-12-01 19:29:05 +0000 UTC" firstStartedPulling="2025-12-01 19:29:06.994400012 +0000 UTC m=+3561.016029271" lastFinishedPulling="2025-12-01 19:29:10.6334688 +0000 UTC m=+3564.655098059" observedRunningTime="2025-12-01 19:29:11.101311899 +0000 UTC m=+3565.122941198" watchObservedRunningTime="2025-12-01 19:29:11.123264744 +0000 UTC m=+3565.144894013" Dec 01 19:29:15 crc kubenswrapper[4935]: I1201 19:29:15.371775 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:15 crc kubenswrapper[4935]: I1201 19:29:15.372470 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:15 crc kubenswrapper[4935]: I1201 19:29:15.475227 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:16 crc kubenswrapper[4935]: I1201 19:29:16.224848 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:16 crc kubenswrapper[4935]: I1201 19:29:16.277587 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b5bp2"] Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.188638 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b5bp2" podUID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerName="registry-server" containerID="cri-o://8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6" gracePeriod=2 Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.740835 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.843654 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hqgk\" (UniqueName: \"kubernetes.io/projected/57c06919-d4ee-4399-9f05-b81eb5707f3f-kube-api-access-4hqgk\") pod \"57c06919-d4ee-4399-9f05-b81eb5707f3f\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.843747 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-utilities\") pod \"57c06919-d4ee-4399-9f05-b81eb5707f3f\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.843828 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-catalog-content\") pod \"57c06919-d4ee-4399-9f05-b81eb5707f3f\" (UID: \"57c06919-d4ee-4399-9f05-b81eb5707f3f\") " Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.845779 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-utilities" (OuterVolumeSpecName: "utilities") pod "57c06919-d4ee-4399-9f05-b81eb5707f3f" (UID: "57c06919-d4ee-4399-9f05-b81eb5707f3f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.850127 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57c06919-d4ee-4399-9f05-b81eb5707f3f-kube-api-access-4hqgk" (OuterVolumeSpecName: "kube-api-access-4hqgk") pod "57c06919-d4ee-4399-9f05-b81eb5707f3f" (UID: "57c06919-d4ee-4399-9f05-b81eb5707f3f"). InnerVolumeSpecName "kube-api-access-4hqgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.923814 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57c06919-d4ee-4399-9f05-b81eb5707f3f" (UID: "57c06919-d4ee-4399-9f05-b81eb5707f3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.946878 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hqgk\" (UniqueName: \"kubernetes.io/projected/57c06919-d4ee-4399-9f05-b81eb5707f3f-kube-api-access-4hqgk\") on node \"crc\" DevicePath \"\"" Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.946928 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:29:18 crc kubenswrapper[4935]: I1201 19:29:18.946950 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57c06919-d4ee-4399-9f05-b81eb5707f3f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.202573 4935 generic.go:334] "Generic (PLEG): container finished" podID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerID="8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6" exitCode=0 Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.202621 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5bp2" event={"ID":"57c06919-d4ee-4399-9f05-b81eb5707f3f","Type":"ContainerDied","Data":"8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6"} Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.202652 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5bp2" event={"ID":"57c06919-d4ee-4399-9f05-b81eb5707f3f","Type":"ContainerDied","Data":"1b584902cac10e687eadb815f1fdc115dd4752ec89c08a138dd52ac32af8a537"} Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.202669 4935 scope.go:117] "RemoveContainer" containerID="8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.202671 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b5bp2" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.239462 4935 scope.go:117] "RemoveContainer" containerID="11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.275641 4935 scope.go:117] "RemoveContainer" containerID="9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.279968 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b5bp2"] Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.296062 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b5bp2"] Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.360364 4935 scope.go:117] "RemoveContainer" containerID="8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6" Dec 01 19:29:19 crc kubenswrapper[4935]: E1201 19:29:19.361092 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6\": container with ID starting with 8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6 not found: ID does not exist" containerID="8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.361231 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6"} err="failed to get container status \"8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6\": rpc error: code = NotFound desc = could not find container \"8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6\": container with ID starting with 8c6696eccd1799c3a1af34abe7675c47996369d80ab108e635349c10349f04b6 not found: ID does not exist" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.361316 4935 scope.go:117] "RemoveContainer" containerID="11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b" Dec 01 19:29:19 crc kubenswrapper[4935]: E1201 19:29:19.361708 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b\": container with ID starting with 11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b not found: ID does not exist" containerID="11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.361808 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b"} err="failed to get container status \"11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b\": rpc error: code = NotFound desc = could not find container \"11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b\": container with ID starting with 11da1b167f31d130b6afbf9f320439d77aa77be90585e42715f94e57a17ab67b not found: ID does not exist" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.361879 4935 scope.go:117] "RemoveContainer" containerID="9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75" Dec 01 19:29:19 crc kubenswrapper[4935]: E1201 19:29:19.365018 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75\": container with ID starting with 9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75 not found: ID does not exist" containerID="9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75" Dec 01 19:29:19 crc kubenswrapper[4935]: I1201 19:29:19.365204 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75"} err="failed to get container status \"9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75\": rpc error: code = NotFound desc = could not find container \"9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75\": container with ID starting with 9abd1173c86ad3b6e562162961083ebea0dc4dc98a8af336b3cf36cafc388d75 not found: ID does not exist" Dec 01 19:29:20 crc kubenswrapper[4935]: I1201 19:29:20.531314 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57c06919-d4ee-4399-9f05-b81eb5707f3f" path="/var/lib/kubelet/pods/57c06919-d4ee-4399-9f05-b81eb5707f3f/volumes" Dec 01 19:29:24 crc kubenswrapper[4935]: I1201 19:29:24.346053 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:29:24 crc kubenswrapper[4935]: I1201 19:29:24.348195 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:29:54 crc kubenswrapper[4935]: I1201 19:29:54.346196 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:29:54 crc kubenswrapper[4935]: I1201 19:29:54.346746 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.162835 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4"] Dec 01 19:30:00 crc kubenswrapper[4935]: E1201 19:30:00.163923 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerName="extract-content" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.163937 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerName="extract-content" Dec 01 19:30:00 crc kubenswrapper[4935]: E1201 19:30:00.163973 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerName="registry-server" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.163980 4935 
state_mem.go:107] "Deleted CPUSet assignment" podUID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerName="registry-server" Dec 01 19:30:00 crc kubenswrapper[4935]: E1201 19:30:00.163993 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerName="extract-utilities" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.164000 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerName="extract-utilities" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.164251 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="57c06919-d4ee-4399-9f05-b81eb5707f3f" containerName="registry-server" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.165081 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.167473 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.167705 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.179330 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4"] Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.299112 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c4417c7-b634-4c6f-8609-1777156a606a-config-volume\") pod \"collect-profiles-29410290-2rbm4\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.299178 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c4417c7-b634-4c6f-8609-1777156a606a-secret-volume\") pod \"collect-profiles-29410290-2rbm4\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.299426 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd599\" (UniqueName: \"kubernetes.io/projected/2c4417c7-b634-4c6f-8609-1777156a606a-kube-api-access-xd599\") pod \"collect-profiles-29410290-2rbm4\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.401973 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd599\" (UniqueName: \"kubernetes.io/projected/2c4417c7-b634-4c6f-8609-1777156a606a-kube-api-access-xd599\") pod \"collect-profiles-29410290-2rbm4\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.402158 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/2c4417c7-b634-4c6f-8609-1777156a606a-config-volume\") pod \"collect-profiles-29410290-2rbm4\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.402197 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c4417c7-b634-4c6f-8609-1777156a606a-secret-volume\") pod \"collect-profiles-29410290-2rbm4\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.403006 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c4417c7-b634-4c6f-8609-1777156a606a-config-volume\") pod \"collect-profiles-29410290-2rbm4\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.409868 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c4417c7-b634-4c6f-8609-1777156a606a-secret-volume\") pod \"collect-profiles-29410290-2rbm4\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.424653 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd599\" (UniqueName: \"kubernetes.io/projected/2c4417c7-b634-4c6f-8609-1777156a606a-kube-api-access-xd599\") pod \"collect-profiles-29410290-2rbm4\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.501645 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:00 crc kubenswrapper[4935]: I1201 19:30:00.973761 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4"] Dec 01 19:30:01 crc kubenswrapper[4935]: I1201 19:30:01.766292 4935 generic.go:334] "Generic (PLEG): container finished" podID="2c4417c7-b634-4c6f-8609-1777156a606a" containerID="5752f3659847b12d6f45318d9f69a4b0dcb70bbd430206193d3e93c462efd247" exitCode=0 Dec 01 19:30:01 crc kubenswrapper[4935]: I1201 19:30:01.766536 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" event={"ID":"2c4417c7-b634-4c6f-8609-1777156a606a","Type":"ContainerDied","Data":"5752f3659847b12d6f45318d9f69a4b0dcb70bbd430206193d3e93c462efd247"} Dec 01 19:30:01 crc kubenswrapper[4935]: I1201 19:30:01.767171 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" event={"ID":"2c4417c7-b634-4c6f-8609-1777156a606a","Type":"ContainerStarted","Data":"56206ba36dc05f5236db4893f34b6b598d0bb66266f9781cccf16f70f7363c4a"} Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.231387 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.368683 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c4417c7-b634-4c6f-8609-1777156a606a-config-volume\") pod \"2c4417c7-b634-4c6f-8609-1777156a606a\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.369060 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c4417c7-b634-4c6f-8609-1777156a606a-secret-volume\") pod \"2c4417c7-b634-4c6f-8609-1777156a606a\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.369392 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xd599\" (UniqueName: \"kubernetes.io/projected/2c4417c7-b634-4c6f-8609-1777156a606a-kube-api-access-xd599\") pod \"2c4417c7-b634-4c6f-8609-1777156a606a\" (UID: \"2c4417c7-b634-4c6f-8609-1777156a606a\") " Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.369403 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c4417c7-b634-4c6f-8609-1777156a606a-config-volume" (OuterVolumeSpecName: "config-volume") pod "2c4417c7-b634-4c6f-8609-1777156a606a" (UID: "2c4417c7-b634-4c6f-8609-1777156a606a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.370287 4935 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2c4417c7-b634-4c6f-8609-1777156a606a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.376462 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c4417c7-b634-4c6f-8609-1777156a606a-kube-api-access-xd599" (OuterVolumeSpecName: "kube-api-access-xd599") pod "2c4417c7-b634-4c6f-8609-1777156a606a" (UID: "2c4417c7-b634-4c6f-8609-1777156a606a"). InnerVolumeSpecName "kube-api-access-xd599". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.376688 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c4417c7-b634-4c6f-8609-1777156a606a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2c4417c7-b634-4c6f-8609-1777156a606a" (UID: "2c4417c7-b634-4c6f-8609-1777156a606a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.472727 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xd599\" (UniqueName: \"kubernetes.io/projected/2c4417c7-b634-4c6f-8609-1777156a606a-kube-api-access-xd599\") on node \"crc\" DevicePath \"\"" Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.472758 4935 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2c4417c7-b634-4c6f-8609-1777156a606a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.822020 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" event={"ID":"2c4417c7-b634-4c6f-8609-1777156a606a","Type":"ContainerDied","Data":"56206ba36dc05f5236db4893f34b6b598d0bb66266f9781cccf16f70f7363c4a"} Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.822059 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56206ba36dc05f5236db4893f34b6b598d0bb66266f9781cccf16f70f7363c4a" Dec 01 19:30:03 crc kubenswrapper[4935]: I1201 19:30:03.822694 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4" Dec 01 19:30:04 crc kubenswrapper[4935]: I1201 19:30:04.320812 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg"] Dec 01 19:30:04 crc kubenswrapper[4935]: I1201 19:30:04.330371 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410245-fgncg"] Dec 01 19:30:04 crc kubenswrapper[4935]: I1201 19:30:04.526093 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0b46824-03b2-40d6-b1bf-31efd83aaf80" path="/var/lib/kubelet/pods/c0b46824-03b2-40d6-b1bf-31efd83aaf80/volumes" Dec 01 19:30:11 crc kubenswrapper[4935]: I1201 19:30:11.164707 4935 scope.go:117] "RemoveContainer" containerID="63fe31c853dc4de430febf9463ea9fe37c2b4f6b9b4e2cee0233fbaecc3ca5dd" Dec 01 19:30:24 crc kubenswrapper[4935]: I1201 19:30:24.345735 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:30:24 crc kubenswrapper[4935]: I1201 19:30:24.346341 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:30:24 crc kubenswrapper[4935]: I1201 19:30:24.346398 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:30:24 crc kubenswrapper[4935]: I1201 19:30:24.347624 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Dec 01 19:30:24 crc kubenswrapper[4935]: I1201 19:30:24.347686 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" gracePeriod=600 Dec 01 19:30:24 crc kubenswrapper[4935]: E1201 19:30:24.473637 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:30:25 crc kubenswrapper[4935]: I1201 19:30:25.116785 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" exitCode=0 Dec 01 19:30:25 crc kubenswrapper[4935]: I1201 19:30:25.116908 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416"} Dec 01 19:30:25 crc kubenswrapper[4935]: I1201 19:30:25.117166 4935 scope.go:117] "RemoveContainer" containerID="47200a92c9791b4df1cc46340078b999422d5ea623d96f2ed60b96d61034521a" Dec 01 19:30:25 crc kubenswrapper[4935]: I1201 19:30:25.118321 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:30:25 crc kubenswrapper[4935]: E1201 19:30:25.118915 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:30:38 crc kubenswrapper[4935]: I1201 19:30:38.508955 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:30:38 crc kubenswrapper[4935]: E1201 19:30:38.510514 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:30:43 crc kubenswrapper[4935]: E1201 19:30:43.383635 4935 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.65:54616->38.102.83.65:38587: write tcp 192.168.126.11:10250->192.168.126.11:56596: write: broken pipe Dec 01 19:30:43 crc kubenswrapper[4935]: E1201 19:30:43.384538 4935 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.65:54616->38.102.83.65:38587: write tcp 38.102.83.65:54616->38.102.83.65:38587: write: broken pipe Dec 01 
19:30:50 crc kubenswrapper[4935]: I1201 19:30:50.519222 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:30:50 crc kubenswrapper[4935]: E1201 19:30:50.520685 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:31:04 crc kubenswrapper[4935]: I1201 19:31:04.509044 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:31:04 crc kubenswrapper[4935]: E1201 19:31:04.510359 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:31:16 crc kubenswrapper[4935]: I1201 19:31:16.520492 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:31:16 crc kubenswrapper[4935]: E1201 19:31:16.521523 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:31:29 crc kubenswrapper[4935]: I1201 19:31:29.509222 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:31:29 crc kubenswrapper[4935]: E1201 19:31:29.510822 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:31:43 crc kubenswrapper[4935]: I1201 19:31:43.508655 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:31:43 crc kubenswrapper[4935]: E1201 19:31:43.509376 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:31:57 crc kubenswrapper[4935]: I1201 19:31:57.509109 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:31:57 crc 
kubenswrapper[4935]: E1201 19:31:57.510306 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:32:11 crc kubenswrapper[4935]: I1201 19:32:11.509719 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:32:11 crc kubenswrapper[4935]: E1201 19:32:11.511287 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:32:23 crc kubenswrapper[4935]: I1201 19:32:23.509412 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:32:23 crc kubenswrapper[4935]: E1201 19:32:23.510728 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:32:36 crc kubenswrapper[4935]: I1201 19:32:36.519101 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:32:36 crc kubenswrapper[4935]: E1201 19:32:36.520077 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:32:48 crc kubenswrapper[4935]: I1201 19:32:48.508011 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:32:48 crc kubenswrapper[4935]: E1201 19:32:48.509275 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:32:59 crc kubenswrapper[4935]: I1201 19:32:59.509196 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:32:59 crc kubenswrapper[4935]: E1201 19:32:59.510486 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:33:10 crc kubenswrapper[4935]: I1201 19:33:10.509223 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:33:10 crc kubenswrapper[4935]: E1201 19:33:10.510792 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:33:24 crc kubenswrapper[4935]: I1201 19:33:24.508699 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:33:24 crc kubenswrapper[4935]: E1201 19:33:24.509654 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:33:38 crc kubenswrapper[4935]: I1201 19:33:38.508974 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:33:38 crc kubenswrapper[4935]: E1201 19:33:38.510125 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:33:51 crc kubenswrapper[4935]: I1201 19:33:51.509580 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:33:51 crc kubenswrapper[4935]: E1201 19:33:51.511031 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:34:03 crc kubenswrapper[4935]: I1201 19:34:03.508771 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:34:03 crc kubenswrapper[4935]: E1201 19:34:03.510341 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:34:18 crc kubenswrapper[4935]: I1201 19:34:18.509732 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:34:18 crc kubenswrapper[4935]: E1201 19:34:18.511028 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:34:31 crc kubenswrapper[4935]: I1201 19:34:31.508856 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:34:31 crc kubenswrapper[4935]: E1201 19:34:31.509853 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:34:45 crc kubenswrapper[4935]: I1201 19:34:45.529634 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:34:45 crc kubenswrapper[4935]: E1201 19:34:45.543770 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:34:58 crc kubenswrapper[4935]: I1201 19:34:58.509007 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:34:58 crc kubenswrapper[4935]: E1201 19:34:58.509738 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:35:13 crc kubenswrapper[4935]: I1201 19:35:13.509852 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:35:13 crc kubenswrapper[4935]: E1201 19:35:13.511296 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:35:24 crc kubenswrapper[4935]: I1201 19:35:24.509904 4935 
scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:35:25 crc kubenswrapper[4935]: I1201 19:35:25.064537 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"b46537a823b7a2e5e2d042692c8e516c7fb4fe9d78b5697251439b932bfdee2e"} Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.087028 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5dc4x"] Dec 01 19:35:29 crc kubenswrapper[4935]: E1201 19:35:29.088819 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c4417c7-b634-4c6f-8609-1777156a606a" containerName="collect-profiles" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.088845 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c4417c7-b634-4c6f-8609-1777156a606a" containerName="collect-profiles" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.089366 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c4417c7-b634-4c6f-8609-1777156a606a" containerName="collect-profiles" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.093028 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.110514 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dc4x"] Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.221211 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzm94\" (UniqueName: \"kubernetes.io/projected/514eebe2-b9ef-4c79-b410-15d7dbb515a5-kube-api-access-kzm94\") pod \"redhat-marketplace-5dc4x\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.221310 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-utilities\") pod \"redhat-marketplace-5dc4x\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.221345 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-catalog-content\") pod \"redhat-marketplace-5dc4x\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.324186 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-utilities\") pod \"redhat-marketplace-5dc4x\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.324276 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-catalog-content\") pod \"redhat-marketplace-5dc4x\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " 
pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.324462 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzm94\" (UniqueName: \"kubernetes.io/projected/514eebe2-b9ef-4c79-b410-15d7dbb515a5-kube-api-access-kzm94\") pod \"redhat-marketplace-5dc4x\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.324899 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-utilities\") pod \"redhat-marketplace-5dc4x\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.325349 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-catalog-content\") pod \"redhat-marketplace-5dc4x\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.348774 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzm94\" (UniqueName: \"kubernetes.io/projected/514eebe2-b9ef-4c79-b410-15d7dbb515a5-kube-api-access-kzm94\") pod \"redhat-marketplace-5dc4x\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.435589 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:29 crc kubenswrapper[4935]: I1201 19:35:29.957544 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dc4x"] Dec 01 19:35:29 crc kubenswrapper[4935]: W1201 19:35:29.957585 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod514eebe2_b9ef_4c79_b410_15d7dbb515a5.slice/crio-e6998de696981a30eb8297559cc496e56d1fffa579299772f8b5c4fffe92a809 WatchSource:0}: Error finding container e6998de696981a30eb8297559cc496e56d1fffa579299772f8b5c4fffe92a809: Status 404 returned error can't find the container with id e6998de696981a30eb8297559cc496e56d1fffa579299772f8b5c4fffe92a809 Dec 01 19:35:30 crc kubenswrapper[4935]: I1201 19:35:30.123050 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dc4x" event={"ID":"514eebe2-b9ef-4c79-b410-15d7dbb515a5","Type":"ContainerStarted","Data":"e6998de696981a30eb8297559cc496e56d1fffa579299772f8b5c4fffe92a809"} Dec 01 19:35:31 crc kubenswrapper[4935]: I1201 19:35:31.151990 4935 generic.go:334] "Generic (PLEG): container finished" podID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerID="940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752" exitCode=0 Dec 01 19:35:31 crc kubenswrapper[4935]: I1201 19:35:31.152270 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dc4x" event={"ID":"514eebe2-b9ef-4c79-b410-15d7dbb515a5","Type":"ContainerDied","Data":"940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752"} Dec 01 19:35:31 crc kubenswrapper[4935]: I1201 19:35:31.155265 4935 provider.go:102] Refreshing cache for provider: 
*credentialprovider.defaultDockerConfigProvider Dec 01 19:35:33 crc kubenswrapper[4935]: I1201 19:35:33.185173 4935 generic.go:334] "Generic (PLEG): container finished" podID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerID="99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc" exitCode=0 Dec 01 19:35:33 crc kubenswrapper[4935]: I1201 19:35:33.185324 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dc4x" event={"ID":"514eebe2-b9ef-4c79-b410-15d7dbb515a5","Type":"ContainerDied","Data":"99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc"} Dec 01 19:35:35 crc kubenswrapper[4935]: I1201 19:35:35.227292 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dc4x" event={"ID":"514eebe2-b9ef-4c79-b410-15d7dbb515a5","Type":"ContainerStarted","Data":"6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798"} Dec 01 19:35:35 crc kubenswrapper[4935]: I1201 19:35:35.270082 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5dc4x" podStartSLOduration=3.535607808 podStartE2EDuration="6.270044101s" podCreationTimestamp="2025-12-01 19:35:29 +0000 UTC" firstStartedPulling="2025-12-01 19:35:31.154880191 +0000 UTC m=+3945.176509490" lastFinishedPulling="2025-12-01 19:35:33.889316524 +0000 UTC m=+3947.910945783" observedRunningTime="2025-12-01 19:35:35.256119278 +0000 UTC m=+3949.277748547" watchObservedRunningTime="2025-12-01 19:35:35.270044101 +0000 UTC m=+3949.291673370" Dec 01 19:35:39 crc kubenswrapper[4935]: I1201 19:35:39.436645 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:39 crc kubenswrapper[4935]: I1201 19:35:39.438254 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:39 crc kubenswrapper[4935]: I1201 19:35:39.496943 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:40 crc kubenswrapper[4935]: I1201 19:35:40.376603 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:40 crc kubenswrapper[4935]: I1201 19:35:40.459882 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dc4x"] Dec 01 19:35:42 crc kubenswrapper[4935]: I1201 19:35:42.318998 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5dc4x" podUID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerName="registry-server" containerID="cri-o://6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798" gracePeriod=2 Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.033511 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.143218 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-catalog-content\") pod \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.143419 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzm94\" (UniqueName: \"kubernetes.io/projected/514eebe2-b9ef-4c79-b410-15d7dbb515a5-kube-api-access-kzm94\") pod \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.143489 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-utilities\") pod \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\" (UID: \"514eebe2-b9ef-4c79-b410-15d7dbb515a5\") " Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.144161 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-utilities" (OuterVolumeSpecName: "utilities") pod "514eebe2-b9ef-4c79-b410-15d7dbb515a5" (UID: "514eebe2-b9ef-4c79-b410-15d7dbb515a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.145432 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.149705 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/514eebe2-b9ef-4c79-b410-15d7dbb515a5-kube-api-access-kzm94" (OuterVolumeSpecName: "kube-api-access-kzm94") pod "514eebe2-b9ef-4c79-b410-15d7dbb515a5" (UID: "514eebe2-b9ef-4c79-b410-15d7dbb515a5"). InnerVolumeSpecName "kube-api-access-kzm94". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.162124 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "514eebe2-b9ef-4c79-b410-15d7dbb515a5" (UID: "514eebe2-b9ef-4c79-b410-15d7dbb515a5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.248040 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzm94\" (UniqueName: \"kubernetes.io/projected/514eebe2-b9ef-4c79-b410-15d7dbb515a5-kube-api-access-kzm94\") on node \"crc\" DevicePath \"\"" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.248094 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514eebe2-b9ef-4c79-b410-15d7dbb515a5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.335916 4935 generic.go:334] "Generic (PLEG): container finished" podID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerID="6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798" exitCode=0 Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.335995 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dc4x" event={"ID":"514eebe2-b9ef-4c79-b410-15d7dbb515a5","Type":"ContainerDied","Data":"6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798"} Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.336079 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5dc4x" event={"ID":"514eebe2-b9ef-4c79-b410-15d7dbb515a5","Type":"ContainerDied","Data":"e6998de696981a30eb8297559cc496e56d1fffa579299772f8b5c4fffe92a809"} Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.336105 4935 scope.go:117] "RemoveContainer" containerID="6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.336025 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5dc4x" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.381133 4935 scope.go:117] "RemoveContainer" containerID="99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.381343 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dc4x"] Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.389455 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5dc4x"] Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.436341 4935 scope.go:117] "RemoveContainer" containerID="940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.476869 4935 scope.go:117] "RemoveContainer" containerID="6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798" Dec 01 19:35:43 crc kubenswrapper[4935]: E1201 19:35:43.477546 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798\": container with ID starting with 6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798 not found: ID does not exist" containerID="6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.477586 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798"} err="failed to get container status \"6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798\": rpc error: code = NotFound desc = could not find container \"6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798\": container with ID starting with 6d989516c632d46139ed4f7636aa05c6c957d386968a4183be060d39d1e51798 not found: ID does not exist" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.477613 4935 scope.go:117] "RemoveContainer" containerID="99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc" Dec 01 19:35:43 crc kubenswrapper[4935]: E1201 19:35:43.478022 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc\": container with ID starting with 99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc not found: ID does not exist" containerID="99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.478066 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc"} err="failed to get container status \"99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc\": rpc error: code = NotFound desc = could not find container \"99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc\": container with ID starting with 99e2e408d0017501860034374de6ddeb358c37e168980ddf36bdd7167107d4cc not found: ID does not exist" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.478086 4935 scope.go:117] "RemoveContainer" containerID="940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752" Dec 01 19:35:43 crc kubenswrapper[4935]: E1201 19:35:43.478527 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752\": container with ID starting with 940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752 not found: ID does not exist" containerID="940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752" Dec 01 19:35:43 crc kubenswrapper[4935]: I1201 19:35:43.478600 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752"} err="failed to get container status \"940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752\": rpc error: code = NotFound desc = could not find container \"940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752\": container with ID starting with 940216a49860522e341f7ff7dc04cfaac301ca90cbe3c5e67359347ac0a57752 not found: ID does not exist" Dec 01 19:35:44 crc kubenswrapper[4935]: I1201 19:35:44.527063 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" path="/var/lib/kubelet/pods/514eebe2-b9ef-4c79-b410-15d7dbb515a5/volumes" Dec 01 19:37:24 crc kubenswrapper[4935]: I1201 19:37:24.345916 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:37:24 crc kubenswrapper[4935]: I1201 19:37:24.346778 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:37:54 crc kubenswrapper[4935]: I1201 19:37:54.346048 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:37:54 crc kubenswrapper[4935]: I1201 19:37:54.347717 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:38:24 crc kubenswrapper[4935]: I1201 19:38:24.346485 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:38:24 crc kubenswrapper[4935]: I1201 19:38:24.347338 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:38:24 crc kubenswrapper[4935]: I1201 19:38:24.347407 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:38:24 crc kubenswrapper[4935]: I1201 19:38:24.348769 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b46537a823b7a2e5e2d042692c8e516c7fb4fe9d78b5697251439b932bfdee2e"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:38:24 crc kubenswrapper[4935]: I1201 19:38:24.348886 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://b46537a823b7a2e5e2d042692c8e516c7fb4fe9d78b5697251439b932bfdee2e" gracePeriod=600 Dec 01 19:38:25 crc kubenswrapper[4935]: I1201 19:38:25.477111 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="b46537a823b7a2e5e2d042692c8e516c7fb4fe9d78b5697251439b932bfdee2e" exitCode=0 Dec 01 19:38:25 crc kubenswrapper[4935]: I1201 19:38:25.477205 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"b46537a823b7a2e5e2d042692c8e516c7fb4fe9d78b5697251439b932bfdee2e"} Dec 01 19:38:25 crc kubenswrapper[4935]: I1201 19:38:25.477638 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5"} Dec 01 19:38:25 crc kubenswrapper[4935]: I1201 19:38:25.477660 4935 scope.go:117] "RemoveContainer" containerID="27889c7119ec47c1ecd2b05523a6c2be08f4a62d3293c0da6b8c857930a95416" Dec 01 19:40:00 crc kubenswrapper[4935]: I1201 19:40:00.797969 4935 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="88d842df-da24-4955-aae0-e6125a01ed0b" containerName="galera" probeResult="failure" output="command timed out" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.570331 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xp5zz"] Dec 01 19:40:05 crc kubenswrapper[4935]: E1201 19:40:05.571846 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerName="registry-server" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.571865 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerName="registry-server" Dec 01 19:40:05 crc kubenswrapper[4935]: E1201 19:40:05.571894 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerName="extract-utilities" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.571903 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerName="extract-utilities" Dec 01 19:40:05 crc kubenswrapper[4935]: E1201 19:40:05.571971 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerName="extract-content" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.571979 4935 
state_mem.go:107] "Deleted CPUSet assignment" podUID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerName="extract-content" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.572382 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="514eebe2-b9ef-4c79-b410-15d7dbb515a5" containerName="registry-server" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.574247 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.586440 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xp5zz"] Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.658308 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4shj\" (UniqueName: \"kubernetes.io/projected/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-kube-api-access-s4shj\") pod \"community-operators-xp5zz\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.658363 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-catalog-content\") pod \"community-operators-xp5zz\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.658394 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-utilities\") pod \"community-operators-xp5zz\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.761004 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4shj\" (UniqueName: \"kubernetes.io/projected/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-kube-api-access-s4shj\") pod \"community-operators-xp5zz\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.761064 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-catalog-content\") pod \"community-operators-xp5zz\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.761125 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-utilities\") pod \"community-operators-xp5zz\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.761711 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-catalog-content\") pod \"community-operators-xp5zz\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc 
kubenswrapper[4935]: I1201 19:40:05.761782 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-utilities\") pod \"community-operators-xp5zz\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.793788 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4shj\" (UniqueName: \"kubernetes.io/projected/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-kube-api-access-s4shj\") pod \"community-operators-xp5zz\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:05 crc kubenswrapper[4935]: I1201 19:40:05.905389 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:06 crc kubenswrapper[4935]: I1201 19:40:06.409112 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xp5zz"] Dec 01 19:40:06 crc kubenswrapper[4935]: I1201 19:40:06.876722 4935 generic.go:334] "Generic (PLEG): container finished" podID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerID="114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7" exitCode=0 Dec 01 19:40:06 crc kubenswrapper[4935]: I1201 19:40:06.876990 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xp5zz" event={"ID":"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a","Type":"ContainerDied","Data":"114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7"} Dec 01 19:40:06 crc kubenswrapper[4935]: I1201 19:40:06.877045 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xp5zz" event={"ID":"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a","Type":"ContainerStarted","Data":"e03285af84337e5b9d2ba3a246ed7430ec9479f54a38b0f655c5b43e72af708a"} Dec 01 19:40:08 crc kubenswrapper[4935]: I1201 19:40:08.919129 4935 generic.go:334] "Generic (PLEG): container finished" podID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerID="8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71" exitCode=0 Dec 01 19:40:08 crc kubenswrapper[4935]: I1201 19:40:08.919867 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xp5zz" event={"ID":"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a","Type":"ContainerDied","Data":"8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71"} Dec 01 19:40:10 crc kubenswrapper[4935]: I1201 19:40:10.947486 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xp5zz" event={"ID":"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a","Type":"ContainerStarted","Data":"e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7"} Dec 01 19:40:10 crc kubenswrapper[4935]: I1201 19:40:10.984643 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xp5zz" podStartSLOduration=3.352193485 podStartE2EDuration="5.984621023s" podCreationTimestamp="2025-12-01 19:40:05 +0000 UTC" firstStartedPulling="2025-12-01 19:40:06.88033257 +0000 UTC m=+4220.901961829" lastFinishedPulling="2025-12-01 19:40:09.512760078 +0000 UTC m=+4223.534389367" observedRunningTime="2025-12-01 19:40:10.969691079 +0000 UTC m=+4224.991320348" watchObservedRunningTime="2025-12-01 19:40:10.984621023 +0000 UTC 
m=+4225.006250292" Dec 01 19:40:15 crc kubenswrapper[4935]: I1201 19:40:15.905619 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:15 crc kubenswrapper[4935]: I1201 19:40:15.906328 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:15 crc kubenswrapper[4935]: I1201 19:40:15.979849 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:16 crc kubenswrapper[4935]: I1201 19:40:16.104854 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:16 crc kubenswrapper[4935]: I1201 19:40:16.229499 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xp5zz"] Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.058000 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xp5zz" podUID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerName="registry-server" containerID="cri-o://e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7" gracePeriod=2 Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.751801 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.808730 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-catalog-content\") pod \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.809003 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-utilities\") pod \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.809042 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4shj\" (UniqueName: \"kubernetes.io/projected/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-kube-api-access-s4shj\") pod \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\" (UID: \"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a\") " Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.810292 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-utilities" (OuterVolumeSpecName: "utilities") pod "8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" (UID: "8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.820752 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-kube-api-access-s4shj" (OuterVolumeSpecName: "kube-api-access-s4shj") pod "8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" (UID: "8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a"). InnerVolumeSpecName "kube-api-access-s4shj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.873850 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" (UID: "8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.911216 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.911446 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:40:18 crc kubenswrapper[4935]: I1201 19:40:18.911456 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4shj\" (UniqueName: \"kubernetes.io/projected/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a-kube-api-access-s4shj\") on node \"crc\" DevicePath \"\"" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.069506 4935 generic.go:334] "Generic (PLEG): container finished" podID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerID="e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7" exitCode=0 Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.069578 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xp5zz" event={"ID":"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a","Type":"ContainerDied","Data":"e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7"} Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.069634 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xp5zz" event={"ID":"8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a","Type":"ContainerDied","Data":"e03285af84337e5b9d2ba3a246ed7430ec9479f54a38b0f655c5b43e72af708a"} Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.069654 4935 scope.go:117] "RemoveContainer" containerID="e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.070267 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xp5zz" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.091014 4935 scope.go:117] "RemoveContainer" containerID="8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.109639 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xp5zz"] Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.121679 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xp5zz"] Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.132501 4935 scope.go:117] "RemoveContainer" containerID="114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.178052 4935 scope.go:117] "RemoveContainer" containerID="e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7" Dec 01 19:40:19 crc kubenswrapper[4935]: E1201 19:40:19.178680 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7\": container with ID starting with e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7 not found: ID does not exist" containerID="e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.178730 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7"} err="failed to get container status \"e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7\": rpc error: code = NotFound desc = could not find container \"e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7\": container with ID starting with e6692a1a48d8b3e6eac9d55da6ab2bda778c9800e265aee874c281b13d2087c7 not found: ID does not exist" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.178752 4935 scope.go:117] "RemoveContainer" containerID="8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71" Dec 01 19:40:19 crc kubenswrapper[4935]: E1201 19:40:19.179084 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71\": container with ID starting with 8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71 not found: ID does not exist" containerID="8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.179106 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71"} err="failed to get container status \"8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71\": rpc error: code = NotFound desc = could not find container \"8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71\": container with ID starting with 8a70ac6814560ba1cbeb8fd6726d03b02a426c3975976c9b5abbc53014c2eb71 not found: ID does not exist" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.179120 4935 scope.go:117] "RemoveContainer" containerID="114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7" Dec 01 19:40:19 crc kubenswrapper[4935]: E1201 19:40:19.179455 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7\": container with ID starting with 114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7 not found: ID does not exist" containerID="114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7" Dec 01 19:40:19 crc kubenswrapper[4935]: I1201 19:40:19.179473 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7"} err="failed to get container status \"114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7\": rpc error: code = NotFound desc = could not find container \"114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7\": container with ID starting with 114b5a7e2075531512cf80d4edd27c4f4dd3ffd652c82f230bc8eca5ec1268f7 not found: ID does not exist" Dec 01 19:40:20 crc kubenswrapper[4935]: I1201 19:40:20.530802 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" path="/var/lib/kubelet/pods/8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a/volumes" Dec 01 19:40:24 crc kubenswrapper[4935]: I1201 19:40:24.345809 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:40:24 crc kubenswrapper[4935]: I1201 19:40:24.346604 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:40:54 crc kubenswrapper[4935]: I1201 19:40:54.345740 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:40:54 crc kubenswrapper[4935]: I1201 19:40:54.346363 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:41:24 crc kubenswrapper[4935]: I1201 19:41:24.345983 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:41:24 crc kubenswrapper[4935]: I1201 19:41:24.346629 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:41:24 crc kubenswrapper[4935]: I1201 19:41:24.346695 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:41:24 crc kubenswrapper[4935]: I1201 19:41:24.347629 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:41:24 crc kubenswrapper[4935]: I1201 19:41:24.347703 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" gracePeriod=600 Dec 01 19:41:24 crc kubenswrapper[4935]: E1201 19:41:24.476332 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:41:24 crc kubenswrapper[4935]: I1201 19:41:24.944740 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" exitCode=0 Dec 01 19:41:24 crc kubenswrapper[4935]: I1201 19:41:24.944829 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5"} Dec 01 19:41:24 crc kubenswrapper[4935]: I1201 19:41:24.945067 4935 scope.go:117] "RemoveContainer" containerID="b46537a823b7a2e5e2d042692c8e516c7fb4fe9d78b5697251439b932bfdee2e" Dec 01 19:41:24 crc kubenswrapper[4935]: I1201 19:41:24.945875 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:41:24 crc kubenswrapper[4935]: E1201 19:41:24.946338 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:41:38 crc kubenswrapper[4935]: I1201 19:41:38.508806 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:41:38 crc kubenswrapper[4935]: E1201 19:41:38.509615 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:41:48 crc 
kubenswrapper[4935]: I1201 19:41:48.053287 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8bfpd"] Dec 01 19:41:48 crc kubenswrapper[4935]: E1201 19:41:48.054866 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerName="extract-content" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.054898 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerName="extract-content" Dec 01 19:41:48 crc kubenswrapper[4935]: E1201 19:41:48.054979 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerName="registry-server" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.054995 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerName="registry-server" Dec 01 19:41:48 crc kubenswrapper[4935]: E1201 19:41:48.055039 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerName="extract-utilities" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.055053 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerName="extract-utilities" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.055551 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf4b7ee-20f5-4dc1-9980-0d4f14584a2a" containerName="registry-server" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.061396 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.077820 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8bfpd"] Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.172448 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-catalog-content\") pod \"certified-operators-8bfpd\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.172525 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-utilities\") pod \"certified-operators-8bfpd\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.172631 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gjjw\" (UniqueName: \"kubernetes.io/projected/8eef3990-f624-4694-9fc2-1043882e1b80-kube-api-access-7gjjw\") pod \"certified-operators-8bfpd\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.275023 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-catalog-content\") pod \"certified-operators-8bfpd\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " 
pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.275085 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-utilities\") pod \"certified-operators-8bfpd\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.275124 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gjjw\" (UniqueName: \"kubernetes.io/projected/8eef3990-f624-4694-9fc2-1043882e1b80-kube-api-access-7gjjw\") pod \"certified-operators-8bfpd\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.275678 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-catalog-content\") pod \"certified-operators-8bfpd\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.275788 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-utilities\") pod \"certified-operators-8bfpd\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.302848 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gjjw\" (UniqueName: \"kubernetes.io/projected/8eef3990-f624-4694-9fc2-1043882e1b80-kube-api-access-7gjjw\") pod \"certified-operators-8bfpd\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.388651 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:48 crc kubenswrapper[4935]: I1201 19:41:48.944712 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8bfpd"] Dec 01 19:41:49 crc kubenswrapper[4935]: I1201 19:41:49.278036 4935 generic.go:334] "Generic (PLEG): container finished" podID="8eef3990-f624-4694-9fc2-1043882e1b80" containerID="4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd" exitCode=0 Dec 01 19:41:49 crc kubenswrapper[4935]: I1201 19:41:49.278279 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bfpd" event={"ID":"8eef3990-f624-4694-9fc2-1043882e1b80","Type":"ContainerDied","Data":"4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd"} Dec 01 19:41:49 crc kubenswrapper[4935]: I1201 19:41:49.278363 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bfpd" event={"ID":"8eef3990-f624-4694-9fc2-1043882e1b80","Type":"ContainerStarted","Data":"85e52f5b617e1b7b9a3146a412dde777205c3114404a826a2b0ebf02220a95e2"} Dec 01 19:41:49 crc kubenswrapper[4935]: I1201 19:41:49.281585 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:41:51 crc kubenswrapper[4935]: I1201 19:41:51.302944 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bfpd" event={"ID":"8eef3990-f624-4694-9fc2-1043882e1b80","Type":"ContainerStarted","Data":"6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514"} Dec 01 19:41:51 crc kubenswrapper[4935]: I1201 19:41:51.508513 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:41:51 crc kubenswrapper[4935]: E1201 19:41:51.508870 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:41:52 crc kubenswrapper[4935]: I1201 19:41:52.317189 4935 generic.go:334] "Generic (PLEG): container finished" podID="8eef3990-f624-4694-9fc2-1043882e1b80" containerID="6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514" exitCode=0 Dec 01 19:41:52 crc kubenswrapper[4935]: I1201 19:41:52.317259 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bfpd" event={"ID":"8eef3990-f624-4694-9fc2-1043882e1b80","Type":"ContainerDied","Data":"6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514"} Dec 01 19:41:53 crc kubenswrapper[4935]: I1201 19:41:53.332714 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bfpd" event={"ID":"8eef3990-f624-4694-9fc2-1043882e1b80","Type":"ContainerStarted","Data":"a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232"} Dec 01 19:41:53 crc kubenswrapper[4935]: I1201 19:41:53.364551 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8bfpd" podStartSLOduration=1.847113759 podStartE2EDuration="5.364531569s" podCreationTimestamp="2025-12-01 19:41:48 +0000 UTC" 
firstStartedPulling="2025-12-01 19:41:49.281351493 +0000 UTC m=+4323.302980742" lastFinishedPulling="2025-12-01 19:41:52.798769283 +0000 UTC m=+4326.820398552" observedRunningTime="2025-12-01 19:41:53.358073728 +0000 UTC m=+4327.379702997" watchObservedRunningTime="2025-12-01 19:41:53.364531569 +0000 UTC m=+4327.386160838" Dec 01 19:41:58 crc kubenswrapper[4935]: I1201 19:41:58.389405 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:58 crc kubenswrapper[4935]: I1201 19:41:58.390004 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:58 crc kubenswrapper[4935]: I1201 19:41:58.445841 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:59 crc kubenswrapper[4935]: I1201 19:41:59.462058 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:41:59 crc kubenswrapper[4935]: I1201 19:41:59.569244 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8bfpd"] Dec 01 19:42:01 crc kubenswrapper[4935]: I1201 19:42:01.432528 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8bfpd" podUID="8eef3990-f624-4694-9fc2-1043882e1b80" containerName="registry-server" containerID="cri-o://a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232" gracePeriod=2 Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.016328 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.138352 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-utilities\") pod \"8eef3990-f624-4694-9fc2-1043882e1b80\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.138448 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gjjw\" (UniqueName: \"kubernetes.io/projected/8eef3990-f624-4694-9fc2-1043882e1b80-kube-api-access-7gjjw\") pod \"8eef3990-f624-4694-9fc2-1043882e1b80\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.138907 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-catalog-content\") pod \"8eef3990-f624-4694-9fc2-1043882e1b80\" (UID: \"8eef3990-f624-4694-9fc2-1043882e1b80\") " Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.139566 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-utilities" (OuterVolumeSpecName: "utilities") pod "8eef3990-f624-4694-9fc2-1043882e1b80" (UID: "8eef3990-f624-4694-9fc2-1043882e1b80"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.139964 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.151014 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eef3990-f624-4694-9fc2-1043882e1b80-kube-api-access-7gjjw" (OuterVolumeSpecName: "kube-api-access-7gjjw") pod "8eef3990-f624-4694-9fc2-1043882e1b80" (UID: "8eef3990-f624-4694-9fc2-1043882e1b80"). InnerVolumeSpecName "kube-api-access-7gjjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.213085 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8eef3990-f624-4694-9fc2-1043882e1b80" (UID: "8eef3990-f624-4694-9fc2-1043882e1b80"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.243876 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eef3990-f624-4694-9fc2-1043882e1b80-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.243946 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gjjw\" (UniqueName: \"kubernetes.io/projected/8eef3990-f624-4694-9fc2-1043882e1b80-kube-api-access-7gjjw\") on node \"crc\" DevicePath \"\"" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.453924 4935 generic.go:334] "Generic (PLEG): container finished" podID="8eef3990-f624-4694-9fc2-1043882e1b80" containerID="a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232" exitCode=0 Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.454004 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bfpd" event={"ID":"8eef3990-f624-4694-9fc2-1043882e1b80","Type":"ContainerDied","Data":"a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232"} Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.454049 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bfpd" event={"ID":"8eef3990-f624-4694-9fc2-1043882e1b80","Type":"ContainerDied","Data":"85e52f5b617e1b7b9a3146a412dde777205c3114404a826a2b0ebf02220a95e2"} Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.454103 4935 scope.go:117] "RemoveContainer" containerID="a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.454354 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8bfpd" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.510778 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:42:02 crc kubenswrapper[4935]: E1201 19:42:02.511633 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.517020 4935 scope.go:117] "RemoveContainer" containerID="6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.539311 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8bfpd"] Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.541430 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8bfpd"] Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.557806 4935 scope.go:117] "RemoveContainer" containerID="4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.622012 4935 scope.go:117] "RemoveContainer" containerID="a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232" Dec 01 19:42:02 crc kubenswrapper[4935]: E1201 19:42:02.622694 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232\": container with ID starting with a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232 not found: ID does not exist" containerID="a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.622821 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232"} err="failed to get container status \"a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232\": rpc error: code = NotFound desc = could not find container \"a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232\": container with ID starting with a35d179848772832f1ca6e9ca5d7c6be6a921a6a71b9890d367b910de8804232 not found: ID does not exist" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.622946 4935 scope.go:117] "RemoveContainer" containerID="6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514" Dec 01 19:42:02 crc kubenswrapper[4935]: E1201 19:42:02.623527 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514\": container with ID starting with 6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514 not found: ID does not exist" containerID="6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.623573 4935 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514"} err="failed to get container status \"6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514\": rpc error: code = NotFound desc = could not find container \"6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514\": container with ID starting with 6d4de7ddc27e340f3982793f0a913c62388a996be65682aeef09c021aaf55514 not found: ID does not exist" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.623602 4935 scope.go:117] "RemoveContainer" containerID="4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd" Dec 01 19:42:02 crc kubenswrapper[4935]: E1201 19:42:02.624055 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd\": container with ID starting with 4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd not found: ID does not exist" containerID="4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd" Dec 01 19:42:02 crc kubenswrapper[4935]: I1201 19:42:02.624204 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd"} err="failed to get container status \"4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd\": rpc error: code = NotFound desc = could not find container \"4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd\": container with ID starting with 4c534243bf820f5874e8d1d5573d4054cbc3923b6e5c033c456cacdab223c7fd not found: ID does not exist" Dec 01 19:42:04 crc kubenswrapper[4935]: I1201 19:42:04.525876 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8eef3990-f624-4694-9fc2-1043882e1b80" path="/var/lib/kubelet/pods/8eef3990-f624-4694-9fc2-1043882e1b80/volumes" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.856803 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wvf7w"] Dec 01 19:42:11 crc kubenswrapper[4935]: E1201 19:42:11.858182 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eef3990-f624-4694-9fc2-1043882e1b80" containerName="extract-utilities" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.858205 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eef3990-f624-4694-9fc2-1043882e1b80" containerName="extract-utilities" Dec 01 19:42:11 crc kubenswrapper[4935]: E1201 19:42:11.858241 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eef3990-f624-4694-9fc2-1043882e1b80" containerName="extract-content" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.858254 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eef3990-f624-4694-9fc2-1043882e1b80" containerName="extract-content" Dec 01 19:42:11 crc kubenswrapper[4935]: E1201 19:42:11.858316 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eef3990-f624-4694-9fc2-1043882e1b80" containerName="registry-server" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.858329 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eef3990-f624-4694-9fc2-1043882e1b80" containerName="registry-server" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.858756 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eef3990-f624-4694-9fc2-1043882e1b80" containerName="registry-server" Dec 01 19:42:11 crc 
kubenswrapper[4935]: I1201 19:42:11.861914 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.870054 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wvf7w"] Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.889054 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-utilities\") pod \"redhat-operators-wvf7w\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.889119 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-catalog-content\") pod \"redhat-operators-wvf7w\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.889323 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l75h7\" (UniqueName: \"kubernetes.io/projected/12417cd5-a766-442f-ac2e-da48b21b03a8-kube-api-access-l75h7\") pod \"redhat-operators-wvf7w\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.991030 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-utilities\") pod \"redhat-operators-wvf7w\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.991333 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-catalog-content\") pod \"redhat-operators-wvf7w\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.991578 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l75h7\" (UniqueName: \"kubernetes.io/projected/12417cd5-a766-442f-ac2e-da48b21b03a8-kube-api-access-l75h7\") pod \"redhat-operators-wvf7w\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.991746 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-utilities\") pod \"redhat-operators-wvf7w\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:11 crc kubenswrapper[4935]: I1201 19:42:11.991816 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-catalog-content\") pod \"redhat-operators-wvf7w\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:12 crc kubenswrapper[4935]: I1201 
19:42:12.011876 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l75h7\" (UniqueName: \"kubernetes.io/projected/12417cd5-a766-442f-ac2e-da48b21b03a8-kube-api-access-l75h7\") pod \"redhat-operators-wvf7w\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:12 crc kubenswrapper[4935]: I1201 19:42:12.188014 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:12 crc kubenswrapper[4935]: I1201 19:42:12.683549 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wvf7w"] Dec 01 19:42:13 crc kubenswrapper[4935]: I1201 19:42:13.597507 4935 generic.go:334] "Generic (PLEG): container finished" podID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerID="8c3548e7c62d53cef896f7d612d795f21a5975ed3f3882e49102d9987667ba32" exitCode=0 Dec 01 19:42:13 crc kubenswrapper[4935]: I1201 19:42:13.597581 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wvf7w" event={"ID":"12417cd5-a766-442f-ac2e-da48b21b03a8","Type":"ContainerDied","Data":"8c3548e7c62d53cef896f7d612d795f21a5975ed3f3882e49102d9987667ba32"} Dec 01 19:42:13 crc kubenswrapper[4935]: I1201 19:42:13.597841 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wvf7w" event={"ID":"12417cd5-a766-442f-ac2e-da48b21b03a8","Type":"ContainerStarted","Data":"6814c2dc661b04ab60ad620609dc094f40630221666df93108ca66d428a8e31a"} Dec 01 19:42:15 crc kubenswrapper[4935]: I1201 19:42:15.508894 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:42:15 crc kubenswrapper[4935]: E1201 19:42:15.509689 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:42:15 crc kubenswrapper[4935]: I1201 19:42:15.640249 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wvf7w" event={"ID":"12417cd5-a766-442f-ac2e-da48b21b03a8","Type":"ContainerStarted","Data":"8bdd6acbb765386108f74fe2ba86f1726d030a6e8782d91eb1a87175c97250f9"} Dec 01 19:42:18 crc kubenswrapper[4935]: I1201 19:42:18.676785 4935 generic.go:334] "Generic (PLEG): container finished" podID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerID="8bdd6acbb765386108f74fe2ba86f1726d030a6e8782d91eb1a87175c97250f9" exitCode=0 Dec 01 19:42:18 crc kubenswrapper[4935]: I1201 19:42:18.677216 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wvf7w" event={"ID":"12417cd5-a766-442f-ac2e-da48b21b03a8","Type":"ContainerDied","Data":"8bdd6acbb765386108f74fe2ba86f1726d030a6e8782d91eb1a87175c97250f9"} Dec 01 19:42:20 crc kubenswrapper[4935]: I1201 19:42:20.703121 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wvf7w" event={"ID":"12417cd5-a766-442f-ac2e-da48b21b03a8","Type":"ContainerStarted","Data":"1c490f5de7e0a3b459c6030afdf412f8b87801db636c70acd44c5b4cbcc5be72"} Dec 01 19:42:20 crc kubenswrapper[4935]: I1201 19:42:20.723217 
4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wvf7w" podStartSLOduration=3.711858632 podStartE2EDuration="9.723198439s" podCreationTimestamp="2025-12-01 19:42:11 +0000 UTC" firstStartedPulling="2025-12-01 19:42:13.600776898 +0000 UTC m=+4347.622406157" lastFinishedPulling="2025-12-01 19:42:19.612116705 +0000 UTC m=+4353.633745964" observedRunningTime="2025-12-01 19:42:20.720105747 +0000 UTC m=+4354.741735016" watchObservedRunningTime="2025-12-01 19:42:20.723198439 +0000 UTC m=+4354.744827698" Dec 01 19:42:22 crc kubenswrapper[4935]: I1201 19:42:22.188865 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:22 crc kubenswrapper[4935]: I1201 19:42:22.189868 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:23 crc kubenswrapper[4935]: I1201 19:42:23.458402 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wvf7w" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerName="registry-server" probeResult="failure" output=< Dec 01 19:42:23 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 19:42:23 crc kubenswrapper[4935]: > Dec 01 19:42:28 crc kubenswrapper[4935]: I1201 19:42:28.510406 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:42:28 crc kubenswrapper[4935]: E1201 19:42:28.511712 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:42:32 crc kubenswrapper[4935]: I1201 19:42:32.246429 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:32 crc kubenswrapper[4935]: I1201 19:42:32.296852 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:32 crc kubenswrapper[4935]: I1201 19:42:32.487729 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wvf7w"] Dec 01 19:42:33 crc kubenswrapper[4935]: I1201 19:42:33.886743 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wvf7w" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerName="registry-server" containerID="cri-o://1c490f5de7e0a3b459c6030afdf412f8b87801db636c70acd44c5b4cbcc5be72" gracePeriod=2 Dec 01 19:42:34 crc kubenswrapper[4935]: I1201 19:42:34.904018 4935 generic.go:334] "Generic (PLEG): container finished" podID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerID="1c490f5de7e0a3b459c6030afdf412f8b87801db636c70acd44c5b4cbcc5be72" exitCode=0 Dec 01 19:42:34 crc kubenswrapper[4935]: I1201 19:42:34.904060 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wvf7w" event={"ID":"12417cd5-a766-442f-ac2e-da48b21b03a8","Type":"ContainerDied","Data":"1c490f5de7e0a3b459c6030afdf412f8b87801db636c70acd44c5b4cbcc5be72"} Dec 01 19:42:35 crc 
kubenswrapper[4935]: I1201 19:42:35.314849 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.405517 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l75h7\" (UniqueName: \"kubernetes.io/projected/12417cd5-a766-442f-ac2e-da48b21b03a8-kube-api-access-l75h7\") pod \"12417cd5-a766-442f-ac2e-da48b21b03a8\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.405717 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-utilities\") pod \"12417cd5-a766-442f-ac2e-da48b21b03a8\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.405752 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-catalog-content\") pod \"12417cd5-a766-442f-ac2e-da48b21b03a8\" (UID: \"12417cd5-a766-442f-ac2e-da48b21b03a8\") " Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.406950 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-utilities" (OuterVolumeSpecName: "utilities") pod "12417cd5-a766-442f-ac2e-da48b21b03a8" (UID: "12417cd5-a766-442f-ac2e-da48b21b03a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.408029 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.432314 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12417cd5-a766-442f-ac2e-da48b21b03a8-kube-api-access-l75h7" (OuterVolumeSpecName: "kube-api-access-l75h7") pod "12417cd5-a766-442f-ac2e-da48b21b03a8" (UID: "12417cd5-a766-442f-ac2e-da48b21b03a8"). InnerVolumeSpecName "kube-api-access-l75h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.510515 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l75h7\" (UniqueName: \"kubernetes.io/projected/12417cd5-a766-442f-ac2e-da48b21b03a8-kube-api-access-l75h7\") on node \"crc\" DevicePath \"\"" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.583672 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12417cd5-a766-442f-ac2e-da48b21b03a8" (UID: "12417cd5-a766-442f-ac2e-da48b21b03a8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.613033 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12417cd5-a766-442f-ac2e-da48b21b03a8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.924292 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wvf7w" event={"ID":"12417cd5-a766-442f-ac2e-da48b21b03a8","Type":"ContainerDied","Data":"6814c2dc661b04ab60ad620609dc094f40630221666df93108ca66d428a8e31a"} Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.924369 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wvf7w" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.924374 4935 scope.go:117] "RemoveContainer" containerID="1c490f5de7e0a3b459c6030afdf412f8b87801db636c70acd44c5b4cbcc5be72" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.956734 4935 scope.go:117] "RemoveContainer" containerID="8bdd6acbb765386108f74fe2ba86f1726d030a6e8782d91eb1a87175c97250f9" Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.964966 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wvf7w"] Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.975924 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wvf7w"] Dec 01 19:42:35 crc kubenswrapper[4935]: I1201 19:42:35.982353 4935 scope.go:117] "RemoveContainer" containerID="8c3548e7c62d53cef896f7d612d795f21a5975ed3f3882e49102d9987667ba32" Dec 01 19:42:36 crc kubenswrapper[4935]: I1201 19:42:36.523708 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" path="/var/lib/kubelet/pods/12417cd5-a766-442f-ac2e-da48b21b03a8/volumes" Dec 01 19:42:41 crc kubenswrapper[4935]: I1201 19:42:41.508722 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:42:41 crc kubenswrapper[4935]: E1201 19:42:41.509772 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:42:56 crc kubenswrapper[4935]: I1201 19:42:56.539540 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:42:56 crc kubenswrapper[4935]: E1201 19:42:56.541443 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:43:11 crc kubenswrapper[4935]: I1201 19:43:11.509127 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:43:11 crc kubenswrapper[4935]: E1201 19:43:11.510809 
4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:43:22 crc kubenswrapper[4935]: I1201 19:43:22.509353 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:43:22 crc kubenswrapper[4935]: E1201 19:43:22.511202 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:43:37 crc kubenswrapper[4935]: I1201 19:43:37.509056 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:43:37 crc kubenswrapper[4935]: E1201 19:43:37.510473 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:43:49 crc kubenswrapper[4935]: I1201 19:43:49.508942 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:43:49 crc kubenswrapper[4935]: E1201 19:43:49.510903 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:44:04 crc kubenswrapper[4935]: I1201 19:44:04.508937 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:44:04 crc kubenswrapper[4935]: E1201 19:44:04.510211 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:44:19 crc kubenswrapper[4935]: I1201 19:44:19.509426 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:44:19 crc kubenswrapper[4935]: E1201 19:44:19.510479 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:44:31 crc kubenswrapper[4935]: I1201 19:44:31.507860 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:44:31 crc kubenswrapper[4935]: E1201 19:44:31.508805 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:44:43 crc kubenswrapper[4935]: I1201 19:44:43.508469 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:44:43 crc kubenswrapper[4935]: E1201 19:44:43.509777 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:44:55 crc kubenswrapper[4935]: I1201 19:44:55.516241 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:44:55 crc kubenswrapper[4935]: E1201 19:44:55.525434 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.198080 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm"] Dec 01 19:45:00 crc kubenswrapper[4935]: E1201 19:45:00.199791 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerName="extract-content" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.199813 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerName="extract-content" Dec 01 19:45:00 crc kubenswrapper[4935]: E1201 19:45:00.199831 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerName="extract-utilities" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.199840 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerName="extract-utilities" Dec 01 19:45:00 crc kubenswrapper[4935]: E1201 19:45:00.199856 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerName="registry-server" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.199866 4935 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerName="registry-server" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.200235 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="12417cd5-a766-442f-ac2e-da48b21b03a8" containerName="registry-server" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.201303 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.204064 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.204129 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.222102 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm"] Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.286774 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c192058-a96d-43e6-9394-30536456ba21-secret-volume\") pod \"collect-profiles-29410305-xp2cm\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.286862 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c192058-a96d-43e6-9394-30536456ba21-config-volume\") pod \"collect-profiles-29410305-xp2cm\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.287578 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2vgf\" (UniqueName: \"kubernetes.io/projected/0c192058-a96d-43e6-9394-30536456ba21-kube-api-access-q2vgf\") pod \"collect-profiles-29410305-xp2cm\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.389010 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2vgf\" (UniqueName: \"kubernetes.io/projected/0c192058-a96d-43e6-9394-30536456ba21-kube-api-access-q2vgf\") pod \"collect-profiles-29410305-xp2cm\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.389068 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c192058-a96d-43e6-9394-30536456ba21-secret-volume\") pod \"collect-profiles-29410305-xp2cm\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.389102 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/0c192058-a96d-43e6-9394-30536456ba21-config-volume\") pod \"collect-profiles-29410305-xp2cm\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.390124 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c192058-a96d-43e6-9394-30536456ba21-config-volume\") pod \"collect-profiles-29410305-xp2cm\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.401314 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c192058-a96d-43e6-9394-30536456ba21-secret-volume\") pod \"collect-profiles-29410305-xp2cm\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.410919 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2vgf\" (UniqueName: \"kubernetes.io/projected/0c192058-a96d-43e6-9394-30536456ba21-kube-api-access-q2vgf\") pod \"collect-profiles-29410305-xp2cm\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:00 crc kubenswrapper[4935]: I1201 19:45:00.531376 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:01 crc kubenswrapper[4935]: I1201 19:45:01.028052 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm"] Dec 01 19:45:01 crc kubenswrapper[4935]: W1201 19:45:01.034515 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c192058_a96d_43e6_9394_30536456ba21.slice/crio-e0b6c7056fcae353985b024dc53fec2969ed63da90e4b7f819fcfc8564be7af1 WatchSource:0}: Error finding container e0b6c7056fcae353985b024dc53fec2969ed63da90e4b7f819fcfc8564be7af1: Status 404 returned error can't find the container with id e0b6c7056fcae353985b024dc53fec2969ed63da90e4b7f819fcfc8564be7af1 Dec 01 19:45:01 crc kubenswrapper[4935]: I1201 19:45:01.781892 4935 generic.go:334] "Generic (PLEG): container finished" podID="0c192058-a96d-43e6-9394-30536456ba21" containerID="896ca5ede6298d0a82f47a429d22bd6c52880367da4799e31cd2b5ec1fc5082d" exitCode=0 Dec 01 19:45:01 crc kubenswrapper[4935]: I1201 19:45:01.782087 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" event={"ID":"0c192058-a96d-43e6-9394-30536456ba21","Type":"ContainerDied","Data":"896ca5ede6298d0a82f47a429d22bd6c52880367da4799e31cd2b5ec1fc5082d"} Dec 01 19:45:01 crc kubenswrapper[4935]: I1201 19:45:01.782317 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" event={"ID":"0c192058-a96d-43e6-9394-30536456ba21","Type":"ContainerStarted","Data":"e0b6c7056fcae353985b024dc53fec2969ed63da90e4b7f819fcfc8564be7af1"} Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.218329 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.259226 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2vgf\" (UniqueName: \"kubernetes.io/projected/0c192058-a96d-43e6-9394-30536456ba21-kube-api-access-q2vgf\") pod \"0c192058-a96d-43e6-9394-30536456ba21\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.259378 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c192058-a96d-43e6-9394-30536456ba21-secret-volume\") pod \"0c192058-a96d-43e6-9394-30536456ba21\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.259506 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c192058-a96d-43e6-9394-30536456ba21-config-volume\") pod \"0c192058-a96d-43e6-9394-30536456ba21\" (UID: \"0c192058-a96d-43e6-9394-30536456ba21\") " Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.260255 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c192058-a96d-43e6-9394-30536456ba21-config-volume" (OuterVolumeSpecName: "config-volume") pod "0c192058-a96d-43e6-9394-30536456ba21" (UID: "0c192058-a96d-43e6-9394-30536456ba21"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.266010 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c192058-a96d-43e6-9394-30536456ba21-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0c192058-a96d-43e6-9394-30536456ba21" (UID: "0c192058-a96d-43e6-9394-30536456ba21"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.266131 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c192058-a96d-43e6-9394-30536456ba21-kube-api-access-q2vgf" (OuterVolumeSpecName: "kube-api-access-q2vgf") pod "0c192058-a96d-43e6-9394-30536456ba21" (UID: "0c192058-a96d-43e6-9394-30536456ba21"). InnerVolumeSpecName "kube-api-access-q2vgf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.362533 4935 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c192058-a96d-43e6-9394-30536456ba21-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.362578 4935 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c192058-a96d-43e6-9394-30536456ba21-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.362590 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2vgf\" (UniqueName: \"kubernetes.io/projected/0c192058-a96d-43e6-9394-30536456ba21-kube-api-access-q2vgf\") on node \"crc\" DevicePath \"\"" Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.814703 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" event={"ID":"0c192058-a96d-43e6-9394-30536456ba21","Type":"ContainerDied","Data":"e0b6c7056fcae353985b024dc53fec2969ed63da90e4b7f819fcfc8564be7af1"} Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.815162 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0b6c7056fcae353985b024dc53fec2969ed63da90e4b7f819fcfc8564be7af1" Dec 01 19:45:03 crc kubenswrapper[4935]: I1201 19:45:03.814781 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-xp2cm" Dec 01 19:45:04 crc kubenswrapper[4935]: I1201 19:45:04.294454 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf"] Dec 01 19:45:04 crc kubenswrapper[4935]: I1201 19:45:04.304598 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410260-q2wwf"] Dec 01 19:45:04 crc kubenswrapper[4935]: I1201 19:45:04.533036 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec6a2572-33e9-4baf-965e-dc529220bd30" path="/var/lib/kubelet/pods/ec6a2572-33e9-4baf-965e-dc529220bd30/volumes" Dec 01 19:45:06 crc kubenswrapper[4935]: I1201 19:45:06.521468 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:45:06 crc kubenswrapper[4935]: E1201 19:45:06.522484 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:45:11 crc kubenswrapper[4935]: I1201 19:45:11.749780 4935 scope.go:117] "RemoveContainer" containerID="2fe3214fbccb68b65de05277def3e067647ccb73c7d17f90dcbe3d2fb1aaff4b" Dec 01 19:45:18 crc kubenswrapper[4935]: I1201 19:45:18.508079 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:45:18 crc kubenswrapper[4935]: E1201 19:45:18.508952 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:45:32 crc kubenswrapper[4935]: I1201 19:45:32.509666 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:45:32 crc kubenswrapper[4935]: E1201 19:45:32.510769 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:45:44 crc kubenswrapper[4935]: I1201 19:45:44.508719 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:45:44 crc kubenswrapper[4935]: E1201 19:45:44.509721 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:45:56 crc kubenswrapper[4935]: I1201 19:45:56.526242 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:45:56 crc kubenswrapper[4935]: E1201 19:45:56.527556 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:46:10 crc kubenswrapper[4935]: I1201 19:46:10.508441 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:46:10 crc kubenswrapper[4935]: E1201 19:46:10.509614 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:46:25 crc kubenswrapper[4935]: I1201 19:46:25.509448 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:46:25 crc kubenswrapper[4935]: I1201 19:46:25.816355 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"d068dbefcec89149f9c1bdae291e37c56e8d8233e38f213a4a6697084210d042"} Dec 01 19:48:54 crc kubenswrapper[4935]: I1201 19:48:54.346196 4935 patch_prober.go:28] 
interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:48:54 crc kubenswrapper[4935]: I1201 19:48:54.347579 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:49:24 crc kubenswrapper[4935]: I1201 19:49:24.346502 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:49:24 crc kubenswrapper[4935]: I1201 19:49:24.346964 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:49:54 crc kubenswrapper[4935]: I1201 19:49:54.346678 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:49:54 crc kubenswrapper[4935]: I1201 19:49:54.348615 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:49:54 crc kubenswrapper[4935]: I1201 19:49:54.348750 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:49:54 crc kubenswrapper[4935]: I1201 19:49:54.350246 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d068dbefcec89149f9c1bdae291e37c56e8d8233e38f213a4a6697084210d042"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:49:54 crc kubenswrapper[4935]: I1201 19:49:54.350359 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://d068dbefcec89149f9c1bdae291e37c56e8d8233e38f213a4a6697084210d042" gracePeriod=600 Dec 01 19:49:54 crc kubenswrapper[4935]: I1201 19:49:54.792111 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="d068dbefcec89149f9c1bdae291e37c56e8d8233e38f213a4a6697084210d042" exitCode=0 Dec 01 19:49:54 crc kubenswrapper[4935]: I1201 19:49:54.792142 4935 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"d068dbefcec89149f9c1bdae291e37c56e8d8233e38f213a4a6697084210d042"} Dec 01 19:49:54 crc kubenswrapper[4935]: I1201 19:49:54.792444 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185"} Dec 01 19:49:54 crc kubenswrapper[4935]: I1201 19:49:54.792462 4935 scope.go:117] "RemoveContainer" containerID="183de19f68725cb386a51b51bd7d9c97b0bd78159bdc0badcbd77a56d73247a5" Dec 01 19:50:06 crc kubenswrapper[4935]: I1201 19:50:06.767524 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6whwt"] Dec 01 19:50:06 crc kubenswrapper[4935]: E1201 19:50:06.768653 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c192058-a96d-43e6-9394-30536456ba21" containerName="collect-profiles" Dec 01 19:50:06 crc kubenswrapper[4935]: I1201 19:50:06.768676 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c192058-a96d-43e6-9394-30536456ba21" containerName="collect-profiles" Dec 01 19:50:06 crc kubenswrapper[4935]: I1201 19:50:06.769072 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c192058-a96d-43e6-9394-30536456ba21" containerName="collect-profiles" Dec 01 19:50:06 crc kubenswrapper[4935]: I1201 19:50:06.771964 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:06 crc kubenswrapper[4935]: I1201 19:50:06.787063 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6whwt"] Dec 01 19:50:06 crc kubenswrapper[4935]: I1201 19:50:06.926450 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-utilities\") pod \"community-operators-6whwt\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:06 crc kubenswrapper[4935]: I1201 19:50:06.926633 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-catalog-content\") pod \"community-operators-6whwt\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:06 crc kubenswrapper[4935]: I1201 19:50:06.926756 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2plq\" (UniqueName: \"kubernetes.io/projected/6a50c0e9-58ec-4670-ba5c-43a214fa309e-kube-api-access-k2plq\") pod \"community-operators-6whwt\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.028607 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2plq\" (UniqueName: \"kubernetes.io/projected/6a50c0e9-58ec-4670-ba5c-43a214fa309e-kube-api-access-k2plq\") pod \"community-operators-6whwt\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:07 
crc kubenswrapper[4935]: I1201 19:50:07.029180 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-utilities\") pod \"community-operators-6whwt\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.029378 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-catalog-content\") pod \"community-operators-6whwt\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.029537 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-utilities\") pod \"community-operators-6whwt\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.029875 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-catalog-content\") pod \"community-operators-6whwt\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.053378 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2plq\" (UniqueName: \"kubernetes.io/projected/6a50c0e9-58ec-4670-ba5c-43a214fa309e-kube-api-access-k2plq\") pod \"community-operators-6whwt\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.106340 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.671988 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6whwt"] Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.974264 4935 generic.go:334] "Generic (PLEG): container finished" podID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerID="0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a" exitCode=0 Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.974381 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6whwt" event={"ID":"6a50c0e9-58ec-4670-ba5c-43a214fa309e","Type":"ContainerDied","Data":"0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a"} Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.974550 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6whwt" event={"ID":"6a50c0e9-58ec-4670-ba5c-43a214fa309e","Type":"ContainerStarted","Data":"805ad0e140e8cead1f2970ddf519dd44232c1f24db0ab1a9247ab1e73dd68984"} Dec 01 19:50:07 crc kubenswrapper[4935]: I1201 19:50:07.976909 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:50:08 crc kubenswrapper[4935]: I1201 19:50:08.987369 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6whwt" event={"ID":"6a50c0e9-58ec-4670-ba5c-43a214fa309e","Type":"ContainerStarted","Data":"e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a"} Dec 01 19:50:10 crc kubenswrapper[4935]: I1201 19:50:10.003537 4935 generic.go:334] "Generic (PLEG): container finished" podID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerID="e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a" exitCode=0 Dec 01 19:50:10 crc kubenswrapper[4935]: I1201 19:50:10.003664 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6whwt" event={"ID":"6a50c0e9-58ec-4670-ba5c-43a214fa309e","Type":"ContainerDied","Data":"e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a"} Dec 01 19:50:11 crc kubenswrapper[4935]: I1201 19:50:11.018769 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6whwt" event={"ID":"6a50c0e9-58ec-4670-ba5c-43a214fa309e","Type":"ContainerStarted","Data":"467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d"} Dec 01 19:50:11 crc kubenswrapper[4935]: I1201 19:50:11.034899 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6whwt" podStartSLOduration=2.517781009 podStartE2EDuration="5.034881456s" podCreationTimestamp="2025-12-01 19:50:06 +0000 UTC" firstStartedPulling="2025-12-01 19:50:07.976684279 +0000 UTC m=+4821.998313538" lastFinishedPulling="2025-12-01 19:50:10.493784726 +0000 UTC m=+4824.515413985" observedRunningTime="2025-12-01 19:50:11.033927016 +0000 UTC m=+4825.055556285" watchObservedRunningTime="2025-12-01 19:50:11.034881456 +0000 UTC m=+4825.056510715" Dec 01 19:50:17 crc kubenswrapper[4935]: I1201 19:50:17.107007 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:17 crc kubenswrapper[4935]: I1201 19:50:17.107514 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:17 crc kubenswrapper[4935]: I1201 19:50:17.160275 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:18 crc kubenswrapper[4935]: I1201 19:50:18.167646 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:18 crc kubenswrapper[4935]: I1201 19:50:18.253020 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6whwt"] Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.141748 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6whwt" podUID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerName="registry-server" containerID="cri-o://467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d" gracePeriod=2 Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.699825 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.789141 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2plq\" (UniqueName: \"kubernetes.io/projected/6a50c0e9-58ec-4670-ba5c-43a214fa309e-kube-api-access-k2plq\") pod \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.789495 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-catalog-content\") pod \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.789532 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-utilities\") pod \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\" (UID: \"6a50c0e9-58ec-4670-ba5c-43a214fa309e\") " Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.790413 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-utilities" (OuterVolumeSpecName: "utilities") pod "6a50c0e9-58ec-4670-ba5c-43a214fa309e" (UID: "6a50c0e9-58ec-4670-ba5c-43a214fa309e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.799325 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a50c0e9-58ec-4670-ba5c-43a214fa309e-kube-api-access-k2plq" (OuterVolumeSpecName: "kube-api-access-k2plq") pod "6a50c0e9-58ec-4670-ba5c-43a214fa309e" (UID: "6a50c0e9-58ec-4670-ba5c-43a214fa309e"). InnerVolumeSpecName "kube-api-access-k2plq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.841896 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6a50c0e9-58ec-4670-ba5c-43a214fa309e" (UID: "6a50c0e9-58ec-4670-ba5c-43a214fa309e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.892306 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.892346 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a50c0e9-58ec-4670-ba5c-43a214fa309e-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:20 crc kubenswrapper[4935]: I1201 19:50:20.892360 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2plq\" (UniqueName: \"kubernetes.io/projected/6a50c0e9-58ec-4670-ba5c-43a214fa309e-kube-api-access-k2plq\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.163223 4935 generic.go:334] "Generic (PLEG): container finished" podID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerID="467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d" exitCode=0 Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.163297 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6whwt" event={"ID":"6a50c0e9-58ec-4670-ba5c-43a214fa309e","Type":"ContainerDied","Data":"467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d"} Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.163342 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6whwt" event={"ID":"6a50c0e9-58ec-4670-ba5c-43a214fa309e","Type":"ContainerDied","Data":"805ad0e140e8cead1f2970ddf519dd44232c1f24db0ab1a9247ab1e73dd68984"} Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.163401 4935 scope.go:117] "RemoveContainer" containerID="467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.163436 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6whwt" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.190176 4935 scope.go:117] "RemoveContainer" containerID="e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.223800 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6whwt"] Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.242222 4935 scope.go:117] "RemoveContainer" containerID="0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.244865 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6whwt"] Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.293089 4935 scope.go:117] "RemoveContainer" containerID="467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d" Dec 01 19:50:21 crc kubenswrapper[4935]: E1201 19:50:21.293674 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d\": container with ID starting with 467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d not found: ID does not exist" containerID="467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.293739 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d"} err="failed to get container status \"467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d\": rpc error: code = NotFound desc = could not find container \"467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d\": container with ID starting with 467beb2ffb17ae10563b48126b548f921cbdfb746bb2e6e6ad9223050b89cc8d not found: ID does not exist" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.293780 4935 scope.go:117] "RemoveContainer" containerID="e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a" Dec 01 19:50:21 crc kubenswrapper[4935]: E1201 19:50:21.294171 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a\": container with ID starting with e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a not found: ID does not exist" containerID="e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.294227 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a"} err="failed to get container status \"e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a\": rpc error: code = NotFound desc = could not find container \"e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a\": container with ID starting with e2aa6f266e128bddcb7f96865f2c897ce47844b0ee15c8953b3170aff421507a not found: ID does not exist" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.294261 4935 scope.go:117] "RemoveContainer" containerID="0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a" Dec 01 19:50:21 crc kubenswrapper[4935]: E1201 19:50:21.294780 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a\": container with ID starting with 0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a not found: ID does not exist" containerID="0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a" Dec 01 19:50:21 crc kubenswrapper[4935]: I1201 19:50:21.294812 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a"} err="failed to get container status \"0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a\": rpc error: code = NotFound desc = could not find container \"0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a\": container with ID starting with 0214d1a4ad4aa69516b23135e7287c44180017df498b1387d1738f6f0e175e9a not found: ID does not exist" Dec 01 19:50:22 crc kubenswrapper[4935]: I1201 19:50:22.536013 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" path="/var/lib/kubelet/pods/6a50c0e9-58ec-4670-ba5c-43a214fa309e/volumes" Dec 01 19:51:54 crc kubenswrapper[4935]: I1201 19:51:54.345792 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:51:54 crc kubenswrapper[4935]: I1201 19:51:54.346470 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.565567 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xsnh5"] Dec 01 19:52:06 crc kubenswrapper[4935]: E1201 19:52:06.566578 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerName="registry-server" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.566594 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerName="registry-server" Dec 01 19:52:06 crc kubenswrapper[4935]: E1201 19:52:06.566633 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerName="extract-utilities" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.566643 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerName="extract-utilities" Dec 01 19:52:06 crc kubenswrapper[4935]: E1201 19:52:06.566678 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerName="extract-content" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.566686 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerName="extract-content" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.566997 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a50c0e9-58ec-4670-ba5c-43a214fa309e" containerName="registry-server" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 
19:52:06.569143 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.619252 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xsnh5"] Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.659194 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-catalog-content\") pod \"certified-operators-xsnh5\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.659312 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmnc5\" (UniqueName: \"kubernetes.io/projected/aa7ede37-e261-4756-a60a-cc8c378b4d10-kube-api-access-jmnc5\") pod \"certified-operators-xsnh5\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.659382 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-utilities\") pod \"certified-operators-xsnh5\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.761621 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-utilities\") pod \"certified-operators-xsnh5\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.761895 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-catalog-content\") pod \"certified-operators-xsnh5\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.762120 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-utilities\") pod \"certified-operators-xsnh5\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.762387 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-catalog-content\") pod \"certified-operators-xsnh5\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.762513 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmnc5\" (UniqueName: \"kubernetes.io/projected/aa7ede37-e261-4756-a60a-cc8c378b4d10-kube-api-access-jmnc5\") pod \"certified-operators-xsnh5\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc 
kubenswrapper[4935]: I1201 19:52:06.788050 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmnc5\" (UniqueName: \"kubernetes.io/projected/aa7ede37-e261-4756-a60a-cc8c378b4d10-kube-api-access-jmnc5\") pod \"certified-operators-xsnh5\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:06 crc kubenswrapper[4935]: I1201 19:52:06.911703 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:07 crc kubenswrapper[4935]: I1201 19:52:07.503594 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xsnh5"] Dec 01 19:52:07 crc kubenswrapper[4935]: I1201 19:52:07.611016 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xsnh5" event={"ID":"aa7ede37-e261-4756-a60a-cc8c378b4d10","Type":"ContainerStarted","Data":"21f696eb07b87a3dd0561454a39ba2bc4277cbc0a5cdf199bff9fef48a5a31f2"} Dec 01 19:52:08 crc kubenswrapper[4935]: I1201 19:52:08.629048 4935 generic.go:334] "Generic (PLEG): container finished" podID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerID="93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545" exitCode=0 Dec 01 19:52:08 crc kubenswrapper[4935]: I1201 19:52:08.629403 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xsnh5" event={"ID":"aa7ede37-e261-4756-a60a-cc8c378b4d10","Type":"ContainerDied","Data":"93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545"} Dec 01 19:52:10 crc kubenswrapper[4935]: I1201 19:52:10.659545 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xsnh5" event={"ID":"aa7ede37-e261-4756-a60a-cc8c378b4d10","Type":"ContainerStarted","Data":"9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39"} Dec 01 19:52:11 crc kubenswrapper[4935]: I1201 19:52:11.675563 4935 generic.go:334] "Generic (PLEG): container finished" podID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerID="9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39" exitCode=0 Dec 01 19:52:11 crc kubenswrapper[4935]: I1201 19:52:11.675836 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xsnh5" event={"ID":"aa7ede37-e261-4756-a60a-cc8c378b4d10","Type":"ContainerDied","Data":"9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39"} Dec 01 19:52:12 crc kubenswrapper[4935]: I1201 19:52:12.691494 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xsnh5" event={"ID":"aa7ede37-e261-4756-a60a-cc8c378b4d10","Type":"ContainerStarted","Data":"726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489"} Dec 01 19:52:12 crc kubenswrapper[4935]: I1201 19:52:12.736041 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xsnh5" podStartSLOduration=3.266351518 podStartE2EDuration="6.735952491s" podCreationTimestamp="2025-12-01 19:52:06 +0000 UTC" firstStartedPulling="2025-12-01 19:52:08.63239125 +0000 UTC m=+4942.654020509" lastFinishedPulling="2025-12-01 19:52:12.101992223 +0000 UTC m=+4946.123621482" observedRunningTime="2025-12-01 19:52:12.728542648 +0000 UTC m=+4946.750171917" watchObservedRunningTime="2025-12-01 19:52:12.735952491 +0000 UTC m=+4946.757581750" Dec 01 19:52:16 crc kubenswrapper[4935]: I1201 
19:52:16.911968 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:16 crc kubenswrapper[4935]: I1201 19:52:16.912609 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:16 crc kubenswrapper[4935]: I1201 19:52:16.968746 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:17 crc kubenswrapper[4935]: I1201 19:52:17.819841 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:17 crc kubenswrapper[4935]: I1201 19:52:17.887345 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xsnh5"] Dec 01 19:52:19 crc kubenswrapper[4935]: I1201 19:52:19.769229 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xsnh5" podUID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerName="registry-server" containerID="cri-o://726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489" gracePeriod=2 Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.320445 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.427676 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-utilities\") pod \"aa7ede37-e261-4756-a60a-cc8c378b4d10\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.427902 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmnc5\" (UniqueName: \"kubernetes.io/projected/aa7ede37-e261-4756-a60a-cc8c378b4d10-kube-api-access-jmnc5\") pod \"aa7ede37-e261-4756-a60a-cc8c378b4d10\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.427938 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-catalog-content\") pod \"aa7ede37-e261-4756-a60a-cc8c378b4d10\" (UID: \"aa7ede37-e261-4756-a60a-cc8c378b4d10\") " Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.429722 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-utilities" (OuterVolumeSpecName: "utilities") pod "aa7ede37-e261-4756-a60a-cc8c378b4d10" (UID: "aa7ede37-e261-4756-a60a-cc8c378b4d10"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.436305 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa7ede37-e261-4756-a60a-cc8c378b4d10-kube-api-access-jmnc5" (OuterVolumeSpecName: "kube-api-access-jmnc5") pod "aa7ede37-e261-4756-a60a-cc8c378b4d10" (UID: "aa7ede37-e261-4756-a60a-cc8c378b4d10"). InnerVolumeSpecName "kube-api-access-jmnc5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.588554 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa7ede37-e261-4756-a60a-cc8c378b4d10" (UID: "aa7ede37-e261-4756-a60a-cc8c378b4d10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.602554 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.602586 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmnc5\" (UniqueName: \"kubernetes.io/projected/aa7ede37-e261-4756-a60a-cc8c378b4d10-kube-api-access-jmnc5\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.602602 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa7ede37-e261-4756-a60a-cc8c378b4d10-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.781319 4935 generic.go:334] "Generic (PLEG): container finished" podID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerID="726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489" exitCode=0 Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.781380 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xsnh5" event={"ID":"aa7ede37-e261-4756-a60a-cc8c378b4d10","Type":"ContainerDied","Data":"726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489"} Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.781404 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xsnh5" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.781414 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xsnh5" event={"ID":"aa7ede37-e261-4756-a60a-cc8c378b4d10","Type":"ContainerDied","Data":"21f696eb07b87a3dd0561454a39ba2bc4277cbc0a5cdf199bff9fef48a5a31f2"} Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.781434 4935 scope.go:117] "RemoveContainer" containerID="726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.809291 4935 scope.go:117] "RemoveContainer" containerID="9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.818579 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xsnh5"] Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.833882 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xsnh5"] Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.837820 4935 scope.go:117] "RemoveContainer" containerID="93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.895970 4935 scope.go:117] "RemoveContainer" containerID="726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489" Dec 01 19:52:20 crc kubenswrapper[4935]: E1201 19:52:20.896548 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489\": container with ID starting with 726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489 not found: ID does not exist" containerID="726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.896581 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489"} err="failed to get container status \"726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489\": rpc error: code = NotFound desc = could not find container \"726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489\": container with ID starting with 726deda634c12748b2380b34094fbfa23172641a2df00cd4b90d0248f765d489 not found: ID does not exist" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.896604 4935 scope.go:117] "RemoveContainer" containerID="9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39" Dec 01 19:52:20 crc kubenswrapper[4935]: E1201 19:52:20.897102 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39\": container with ID starting with 9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39 not found: ID does not exist" containerID="9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.897170 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39"} err="failed to get container status \"9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39\": rpc error: code = NotFound desc = could not find 
container \"9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39\": container with ID starting with 9f6a2962219a4abc87f29826be8920cc9a6aec2fe1f289ea5324cc2f18327d39 not found: ID does not exist" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.897208 4935 scope.go:117] "RemoveContainer" containerID="93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545" Dec 01 19:52:20 crc kubenswrapper[4935]: E1201 19:52:20.897543 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545\": container with ID starting with 93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545 not found: ID does not exist" containerID="93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545" Dec 01 19:52:20 crc kubenswrapper[4935]: I1201 19:52:20.897568 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545"} err="failed to get container status \"93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545\": rpc error: code = NotFound desc = could not find container \"93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545\": container with ID starting with 93f22c170a6968c2d325a6e50ed9644dbca08b7292a2685af74a6e632613a545 not found: ID does not exist" Dec 01 19:52:22 crc kubenswrapper[4935]: I1201 19:52:22.523359 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa7ede37-e261-4756-a60a-cc8c378b4d10" path="/var/lib/kubelet/pods/aa7ede37-e261-4756-a60a-cc8c378b4d10/volumes" Dec 01 19:52:24 crc kubenswrapper[4935]: I1201 19:52:24.346791 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:52:24 crc kubenswrapper[4935]: I1201 19:52:24.348002 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:52:54 crc kubenswrapper[4935]: I1201 19:52:54.345870 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:52:54 crc kubenswrapper[4935]: I1201 19:52:54.346599 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:52:54 crc kubenswrapper[4935]: I1201 19:52:54.346727 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 19:52:54 crc kubenswrapper[4935]: I1201 19:52:54.348049 4935 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:52:54 crc kubenswrapper[4935]: I1201 19:52:54.348216 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" gracePeriod=600 Dec 01 19:52:54 crc kubenswrapper[4935]: E1201 19:52:54.476974 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:52:55 crc kubenswrapper[4935]: I1201 19:52:55.241121 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" exitCode=0 Dec 01 19:52:55 crc kubenswrapper[4935]: I1201 19:52:55.241217 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185"} Dec 01 19:52:55 crc kubenswrapper[4935]: I1201 19:52:55.241324 4935 scope.go:117] "RemoveContainer" containerID="d068dbefcec89149f9c1bdae291e37c56e8d8233e38f213a4a6697084210d042" Dec 01 19:52:55 crc kubenswrapper[4935]: I1201 19:52:55.242385 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:52:55 crc kubenswrapper[4935]: E1201 19:52:55.243092 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:53:09 crc kubenswrapper[4935]: I1201 19:53:09.509135 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:53:09 crc kubenswrapper[4935]: E1201 19:53:09.509922 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:53:20 crc kubenswrapper[4935]: I1201 19:53:20.509755 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:53:20 crc kubenswrapper[4935]: E1201 19:53:20.511200 4935 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:53:29 crc kubenswrapper[4935]: E1201 19:53:29.044433 4935 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.65:51352->38.102.83.65:38587: write tcp 38.102.83.65:51352->38.102.83.65:38587: write: broken pipe Dec 01 19:53:34 crc kubenswrapper[4935]: I1201 19:53:34.508777 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:53:34 crc kubenswrapper[4935]: E1201 19:53:34.509636 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:53:47 crc kubenswrapper[4935]: I1201 19:53:47.508029 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:53:47 crc kubenswrapper[4935]: E1201 19:53:47.508796 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:53:58 crc kubenswrapper[4935]: I1201 19:53:58.508989 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:53:58 crc kubenswrapper[4935]: E1201 19:53:58.509717 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:54:10 crc kubenswrapper[4935]: I1201 19:54:10.507783 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:54:10 crc kubenswrapper[4935]: E1201 19:54:10.508568 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:54:23 crc kubenswrapper[4935]: I1201 19:54:23.508204 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 
19:54:23 crc kubenswrapper[4935]: E1201 19:54:23.509183 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.070941 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zs4br"] Dec 01 19:54:30 crc kubenswrapper[4935]: E1201 19:54:30.072280 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerName="extract-content" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.072300 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerName="extract-content" Dec 01 19:54:30 crc kubenswrapper[4935]: E1201 19:54:30.072320 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerName="extract-utilities" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.072329 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerName="extract-utilities" Dec 01 19:54:30 crc kubenswrapper[4935]: E1201 19:54:30.072351 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerName="registry-server" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.072360 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerName="registry-server" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.072654 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa7ede37-e261-4756-a60a-cc8c378b4d10" containerName="registry-server" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.074731 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.093963 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zs4br"] Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.168382 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtph6\" (UniqueName: \"kubernetes.io/projected/8d3bce75-6f46-436b-a6f8-92cba549f016-kube-api-access-dtph6\") pod \"redhat-marketplace-zs4br\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.168613 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-utilities\") pod \"redhat-marketplace-zs4br\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.168672 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-catalog-content\") pod \"redhat-marketplace-zs4br\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.271201 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-utilities\") pod \"redhat-marketplace-zs4br\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.271303 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-catalog-content\") pod \"redhat-marketplace-zs4br\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.271395 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtph6\" (UniqueName: \"kubernetes.io/projected/8d3bce75-6f46-436b-a6f8-92cba549f016-kube-api-access-dtph6\") pod \"redhat-marketplace-zs4br\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.271984 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-utilities\") pod \"redhat-marketplace-zs4br\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.272200 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-catalog-content\") pod \"redhat-marketplace-zs4br\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.290883 4935 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dtph6\" (UniqueName: \"kubernetes.io/projected/8d3bce75-6f46-436b-a6f8-92cba549f016-kube-api-access-dtph6\") pod \"redhat-marketplace-zs4br\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.407089 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:30 crc kubenswrapper[4935]: I1201 19:54:30.917399 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zs4br"] Dec 01 19:54:31 crc kubenswrapper[4935]: I1201 19:54:31.035860 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zs4br" event={"ID":"8d3bce75-6f46-436b-a6f8-92cba549f016","Type":"ContainerStarted","Data":"79814a5b4dfb261e16f5a88dffa9198b0ee6714d6876c6dbb5ec3e7f671c16cb"} Dec 01 19:54:32 crc kubenswrapper[4935]: I1201 19:54:32.051414 4935 generic.go:334] "Generic (PLEG): container finished" podID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerID="803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c" exitCode=0 Dec 01 19:54:32 crc kubenswrapper[4935]: I1201 19:54:32.051619 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zs4br" event={"ID":"8d3bce75-6f46-436b-a6f8-92cba549f016","Type":"ContainerDied","Data":"803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c"} Dec 01 19:54:35 crc kubenswrapper[4935]: I1201 19:54:35.083537 4935 generic.go:334] "Generic (PLEG): container finished" podID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerID="96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a" exitCode=0 Dec 01 19:54:35 crc kubenswrapper[4935]: I1201 19:54:35.083636 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zs4br" event={"ID":"8d3bce75-6f46-436b-a6f8-92cba549f016","Type":"ContainerDied","Data":"96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a"} Dec 01 19:54:36 crc kubenswrapper[4935]: I1201 19:54:36.517717 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:54:36 crc kubenswrapper[4935]: E1201 19:54:36.518821 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:54:37 crc kubenswrapper[4935]: I1201 19:54:37.135634 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zs4br" event={"ID":"8d3bce75-6f46-436b-a6f8-92cba549f016","Type":"ContainerStarted","Data":"ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1"} Dec 01 19:54:37 crc kubenswrapper[4935]: I1201 19:54:37.174492 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zs4br" podStartSLOduration=3.562850853 podStartE2EDuration="7.174465428s" podCreationTimestamp="2025-12-01 19:54:30 +0000 UTC" firstStartedPulling="2025-12-01 19:54:32.054020936 +0000 UTC m=+5086.075650195" lastFinishedPulling="2025-12-01 
19:54:35.665635511 +0000 UTC m=+5089.687264770" observedRunningTime="2025-12-01 19:54:37.1637152 +0000 UTC m=+5091.185344469" watchObservedRunningTime="2025-12-01 19:54:37.174465428 +0000 UTC m=+5091.196094687" Dec 01 19:54:40 crc kubenswrapper[4935]: I1201 19:54:40.408323 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:40 crc kubenswrapper[4935]: I1201 19:54:40.408618 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:40 crc kubenswrapper[4935]: I1201 19:54:40.469011 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:41 crc kubenswrapper[4935]: I1201 19:54:41.236423 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:41 crc kubenswrapper[4935]: I1201 19:54:41.281537 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zs4br"] Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.203121 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zs4br" podUID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerName="registry-server" containerID="cri-o://ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1" gracePeriod=2 Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.767969 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.875363 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtph6\" (UniqueName: \"kubernetes.io/projected/8d3bce75-6f46-436b-a6f8-92cba549f016-kube-api-access-dtph6\") pod \"8d3bce75-6f46-436b-a6f8-92cba549f016\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.875434 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-utilities\") pod \"8d3bce75-6f46-436b-a6f8-92cba549f016\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.875452 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-catalog-content\") pod \"8d3bce75-6f46-436b-a6f8-92cba549f016\" (UID: \"8d3bce75-6f46-436b-a6f8-92cba549f016\") " Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.877335 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-utilities" (OuterVolumeSpecName: "utilities") pod "8d3bce75-6f46-436b-a6f8-92cba549f016" (UID: "8d3bce75-6f46-436b-a6f8-92cba549f016"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.881089 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d3bce75-6f46-436b-a6f8-92cba549f016-kube-api-access-dtph6" (OuterVolumeSpecName: "kube-api-access-dtph6") pod "8d3bce75-6f46-436b-a6f8-92cba549f016" (UID: "8d3bce75-6f46-436b-a6f8-92cba549f016"). InnerVolumeSpecName "kube-api-access-dtph6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.897288 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d3bce75-6f46-436b-a6f8-92cba549f016" (UID: "8d3bce75-6f46-436b-a6f8-92cba549f016"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.978182 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtph6\" (UniqueName: \"kubernetes.io/projected/8d3bce75-6f46-436b-a6f8-92cba549f016-kube-api-access-dtph6\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.978223 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:43 crc kubenswrapper[4935]: I1201 19:54:43.978234 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d3bce75-6f46-436b-a6f8-92cba549f016-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.215624 4935 generic.go:334] "Generic (PLEG): container finished" podID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerID="ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1" exitCode=0 Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.215676 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zs4br" event={"ID":"8d3bce75-6f46-436b-a6f8-92cba549f016","Type":"ContainerDied","Data":"ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1"} Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.215707 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zs4br" event={"ID":"8d3bce75-6f46-436b-a6f8-92cba549f016","Type":"ContainerDied","Data":"79814a5b4dfb261e16f5a88dffa9198b0ee6714d6876c6dbb5ec3e7f671c16cb"} Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.215726 4935 scope.go:117] "RemoveContainer" containerID="ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.215871 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zs4br" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.256884 4935 scope.go:117] "RemoveContainer" containerID="96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.264444 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zs4br"] Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.275022 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zs4br"] Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.300906 4935 scope.go:117] "RemoveContainer" containerID="803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.338116 4935 scope.go:117] "RemoveContainer" containerID="ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1" Dec 01 19:54:44 crc kubenswrapper[4935]: E1201 19:54:44.339074 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1\": container with ID starting with ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1 not found: ID does not exist" containerID="ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.339122 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1"} err="failed to get container status \"ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1\": rpc error: code = NotFound desc = could not find container \"ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1\": container with ID starting with ae17ea0e203053cbb08f194d0f16f1c34b04741586b90af95e49763a405844e1 not found: ID does not exist" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.339215 4935 scope.go:117] "RemoveContainer" containerID="96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a" Dec 01 19:54:44 crc kubenswrapper[4935]: E1201 19:54:44.339609 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a\": container with ID starting with 96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a not found: ID does not exist" containerID="96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.339637 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a"} err="failed to get container status \"96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a\": rpc error: code = NotFound desc = could not find container \"96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a\": container with ID starting with 96a40d1d73ec5569222a2cc471e5928b08e6b923c92e736cad87d9482d440d6a not found: ID does not exist" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.339657 4935 scope.go:117] "RemoveContainer" containerID="803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c" Dec 01 19:54:44 crc kubenswrapper[4935]: E1201 19:54:44.339973 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c\": container with ID starting with 803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c not found: ID does not exist" containerID="803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.339999 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c"} err="failed to get container status \"803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c\": rpc error: code = NotFound desc = could not find container \"803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c\": container with ID starting with 803fde5d5c6592028ce2cf4ce00844d4b561a25e1bd6f086478e79e1e9f5715c not found: ID does not exist" Dec 01 19:54:44 crc kubenswrapper[4935]: I1201 19:54:44.525324 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d3bce75-6f46-436b-a6f8-92cba549f016" path="/var/lib/kubelet/pods/8d3bce75-6f46-436b-a6f8-92cba549f016/volumes" Dec 01 19:54:47 crc kubenswrapper[4935]: I1201 19:54:47.507817 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:54:47 crc kubenswrapper[4935]: E1201 19:54:47.508595 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:55:00 crc kubenswrapper[4935]: I1201 19:55:00.511337 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:55:00 crc kubenswrapper[4935]: E1201 19:55:00.512707 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:55:14 crc kubenswrapper[4935]: I1201 19:55:14.509093 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:55:14 crc kubenswrapper[4935]: E1201 19:55:14.510131 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:55:25 crc kubenswrapper[4935]: I1201 19:55:25.509275 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:55:25 crc kubenswrapper[4935]: E1201 19:55:25.510468 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:55:38 crc kubenswrapper[4935]: I1201 19:55:38.509478 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:55:38 crc kubenswrapper[4935]: E1201 19:55:38.510916 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:55:50 crc kubenswrapper[4935]: I1201 19:55:50.508371 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:55:50 crc kubenswrapper[4935]: E1201 19:55:50.509590 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.509425 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:56:04 crc kubenswrapper[4935]: E1201 19:56:04.510533 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.759264 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bzrxf"] Dec 01 19:56:04 crc kubenswrapper[4935]: E1201 19:56:04.759867 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerName="extract-content" Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.759888 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerName="extract-content" Dec 01 19:56:04 crc kubenswrapper[4935]: E1201 19:56:04.759902 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerName="registry-server" Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.759910 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerName="registry-server" Dec 01 19:56:04 crc kubenswrapper[4935]: E1201 19:56:04.759967 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerName="extract-utilities" Dec 01 19:56:04 crc 
kubenswrapper[4935]: I1201 19:56:04.759977 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerName="extract-utilities" Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.760291 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d3bce75-6f46-436b-a6f8-92cba549f016" containerName="registry-server" Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.765528 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.796605 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bzrxf"] Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.934132 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zbhz\" (UniqueName: \"kubernetes.io/projected/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-kube-api-access-5zbhz\") pod \"redhat-operators-bzrxf\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.934289 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-utilities\") pod \"redhat-operators-bzrxf\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:04 crc kubenswrapper[4935]: I1201 19:56:04.934313 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-catalog-content\") pod \"redhat-operators-bzrxf\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:05 crc kubenswrapper[4935]: I1201 19:56:05.036417 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-utilities\") pod \"redhat-operators-bzrxf\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:05 crc kubenswrapper[4935]: I1201 19:56:05.036487 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-catalog-content\") pod \"redhat-operators-bzrxf\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:05 crc kubenswrapper[4935]: I1201 19:56:05.036726 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zbhz\" (UniqueName: \"kubernetes.io/projected/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-kube-api-access-5zbhz\") pod \"redhat-operators-bzrxf\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:05 crc kubenswrapper[4935]: I1201 19:56:05.037115 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-utilities\") pod \"redhat-operators-bzrxf\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:05 crc kubenswrapper[4935]: 
I1201 19:56:05.037219 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-catalog-content\") pod \"redhat-operators-bzrxf\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:05 crc kubenswrapper[4935]: I1201 19:56:05.057998 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zbhz\" (UniqueName: \"kubernetes.io/projected/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-kube-api-access-5zbhz\") pod \"redhat-operators-bzrxf\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:05 crc kubenswrapper[4935]: I1201 19:56:05.097474 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:05 crc kubenswrapper[4935]: I1201 19:56:05.595485 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bzrxf"] Dec 01 19:56:06 crc kubenswrapper[4935]: I1201 19:56:06.269446 4935 generic.go:334] "Generic (PLEG): container finished" podID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerID="01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae" exitCode=0 Dec 01 19:56:06 crc kubenswrapper[4935]: I1201 19:56:06.269554 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzrxf" event={"ID":"879e2254-29c2-4e0d-bb9c-59d3d4febdd4","Type":"ContainerDied","Data":"01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae"} Dec 01 19:56:06 crc kubenswrapper[4935]: I1201 19:56:06.269834 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzrxf" event={"ID":"879e2254-29c2-4e0d-bb9c-59d3d4febdd4","Type":"ContainerStarted","Data":"07d80b591d82e621b5504973cf7b8c525dca1797750ce298ea8415194a5552ba"} Dec 01 19:56:06 crc kubenswrapper[4935]: I1201 19:56:06.271691 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:56:08 crc kubenswrapper[4935]: I1201 19:56:08.289515 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzrxf" event={"ID":"879e2254-29c2-4e0d-bb9c-59d3d4febdd4","Type":"ContainerStarted","Data":"30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14"} Dec 01 19:56:09 crc kubenswrapper[4935]: E1201 19:56:09.948378 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod879e2254_29c2_4e0d_bb9c_59d3d4febdd4.slice/crio-30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:56:10 crc kubenswrapper[4935]: I1201 19:56:10.313083 4935 generic.go:334] "Generic (PLEG): container finished" podID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerID="30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14" exitCode=0 Dec 01 19:56:10 crc kubenswrapper[4935]: I1201 19:56:10.313204 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzrxf" event={"ID":"879e2254-29c2-4e0d-bb9c-59d3d4febdd4","Type":"ContainerDied","Data":"30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14"} Dec 01 19:56:11 crc kubenswrapper[4935]: I1201 19:56:11.327031 4935 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzrxf" event={"ID":"879e2254-29c2-4e0d-bb9c-59d3d4febdd4","Type":"ContainerStarted","Data":"04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558"} Dec 01 19:56:11 crc kubenswrapper[4935]: I1201 19:56:11.352935 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bzrxf" podStartSLOduration=2.6663641289999997 podStartE2EDuration="7.35291283s" podCreationTimestamp="2025-12-01 19:56:04 +0000 UTC" firstStartedPulling="2025-12-01 19:56:06.271458692 +0000 UTC m=+5180.293087951" lastFinishedPulling="2025-12-01 19:56:10.958007343 +0000 UTC m=+5184.979636652" observedRunningTime="2025-12-01 19:56:11.347772653 +0000 UTC m=+5185.369401912" watchObservedRunningTime="2025-12-01 19:56:11.35291283 +0000 UTC m=+5185.374542099" Dec 01 19:56:15 crc kubenswrapper[4935]: I1201 19:56:15.097945 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:15 crc kubenswrapper[4935]: I1201 19:56:15.098613 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:16 crc kubenswrapper[4935]: I1201 19:56:16.175301 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bzrxf" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerName="registry-server" probeResult="failure" output=< Dec 01 19:56:16 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 19:56:16 crc kubenswrapper[4935]: > Dec 01 19:56:18 crc kubenswrapper[4935]: I1201 19:56:18.509658 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:56:18 crc kubenswrapper[4935]: E1201 19:56:18.512038 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:56:25 crc kubenswrapper[4935]: I1201 19:56:25.194366 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:25 crc kubenswrapper[4935]: I1201 19:56:25.282406 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:25 crc kubenswrapper[4935]: I1201 19:56:25.461494 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bzrxf"] Dec 01 19:56:26 crc kubenswrapper[4935]: I1201 19:56:26.546848 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bzrxf" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerName="registry-server" containerID="cri-o://04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558" gracePeriod=2 Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.108616 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.225441 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-utilities\") pod \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.225639 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-catalog-content\") pod \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.225792 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zbhz\" (UniqueName: \"kubernetes.io/projected/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-kube-api-access-5zbhz\") pod \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\" (UID: \"879e2254-29c2-4e0d-bb9c-59d3d4febdd4\") " Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.226520 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-utilities" (OuterVolumeSpecName: "utilities") pod "879e2254-29c2-4e0d-bb9c-59d3d4febdd4" (UID: "879e2254-29c2-4e0d-bb9c-59d3d4febdd4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.227133 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.233289 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-kube-api-access-5zbhz" (OuterVolumeSpecName: "kube-api-access-5zbhz") pod "879e2254-29c2-4e0d-bb9c-59d3d4febdd4" (UID: "879e2254-29c2-4e0d-bb9c-59d3d4febdd4"). InnerVolumeSpecName "kube-api-access-5zbhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.329744 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zbhz\" (UniqueName: \"kubernetes.io/projected/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-kube-api-access-5zbhz\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.372965 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "879e2254-29c2-4e0d-bb9c-59d3d4febdd4" (UID: "879e2254-29c2-4e0d-bb9c-59d3d4febdd4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.433714 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/879e2254-29c2-4e0d-bb9c-59d3d4febdd4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.567648 4935 generic.go:334] "Generic (PLEG): container finished" podID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerID="04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558" exitCode=0 Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.567696 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzrxf" event={"ID":"879e2254-29c2-4e0d-bb9c-59d3d4febdd4","Type":"ContainerDied","Data":"04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558"} Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.567728 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzrxf" event={"ID":"879e2254-29c2-4e0d-bb9c-59d3d4febdd4","Type":"ContainerDied","Data":"07d80b591d82e621b5504973cf7b8c525dca1797750ce298ea8415194a5552ba"} Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.567752 4935 scope.go:117] "RemoveContainer" containerID="04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.567925 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bzrxf" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.617525 4935 scope.go:117] "RemoveContainer" containerID="30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.667075 4935 scope.go:117] "RemoveContainer" containerID="01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.683400 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bzrxf"] Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.698616 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bzrxf"] Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.722120 4935 scope.go:117] "RemoveContainer" containerID="04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558" Dec 01 19:56:27 crc kubenswrapper[4935]: E1201 19:56:27.722563 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558\": container with ID starting with 04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558 not found: ID does not exist" containerID="04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.722597 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558"} err="failed to get container status \"04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558\": rpc error: code = NotFound desc = could not find container \"04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558\": container with ID starting with 04495bd7df41763ad88969cae4a4c63bbc958da8f86f3dc22d89fb02520cf558 not found: ID does not exist" Dec 01 19:56:27 crc 
kubenswrapper[4935]: I1201 19:56:27.722620 4935 scope.go:117] "RemoveContainer" containerID="30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14" Dec 01 19:56:27 crc kubenswrapper[4935]: E1201 19:56:27.722906 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14\": container with ID starting with 30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14 not found: ID does not exist" containerID="30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.722957 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14"} err="failed to get container status \"30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14\": rpc error: code = NotFound desc = could not find container \"30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14\": container with ID starting with 30c0bd7f7abd0f36d18d134f61bca0420249670d8aaac1a1447af603616c7b14 not found: ID does not exist" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.722991 4935 scope.go:117] "RemoveContainer" containerID="01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae" Dec 01 19:56:27 crc kubenswrapper[4935]: E1201 19:56:27.723503 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae\": container with ID starting with 01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae not found: ID does not exist" containerID="01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae" Dec 01 19:56:27 crc kubenswrapper[4935]: I1201 19:56:27.723534 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae"} err="failed to get container status \"01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae\": rpc error: code = NotFound desc = could not find container \"01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae\": container with ID starting with 01df2607627a87a63e9437e11d4c9661b065b5a770775ea083acb211e82d39ae not found: ID does not exist" Dec 01 19:56:28 crc kubenswrapper[4935]: I1201 19:56:28.530527 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" path="/var/lib/kubelet/pods/879e2254-29c2-4e0d-bb9c-59d3d4febdd4/volumes" Dec 01 19:56:33 crc kubenswrapper[4935]: I1201 19:56:33.508933 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:56:33 crc kubenswrapper[4935]: E1201 19:56:33.510119 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:56:44 crc kubenswrapper[4935]: I1201 19:56:44.508812 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" 
Dec 01 19:56:44 crc kubenswrapper[4935]: E1201 19:56:44.509874 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:56:56 crc kubenswrapper[4935]: I1201 19:56:56.519380 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:56:56 crc kubenswrapper[4935]: E1201 19:56:56.520286 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:57:08 crc kubenswrapper[4935]: I1201 19:57:08.507983 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:57:08 crc kubenswrapper[4935]: E1201 19:57:08.508831 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:57:19 crc kubenswrapper[4935]: I1201 19:57:19.507826 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:57:19 crc kubenswrapper[4935]: E1201 19:57:19.508831 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:57:34 crc kubenswrapper[4935]: I1201 19:57:34.508021 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:57:34 crc kubenswrapper[4935]: E1201 19:57:34.508862 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:57:46 crc kubenswrapper[4935]: I1201 19:57:46.518563 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:57:46 crc kubenswrapper[4935]: E1201 19:57:46.519795 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 19:57:57 crc kubenswrapper[4935]: I1201 19:57:57.508644 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 19:57:58 crc kubenswrapper[4935]: I1201 19:57:58.832573 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"6b82e4de433a4b2d1c72ed1d6c320adde8e78c24624f68417ba5f43831f3c3cf"} Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.579296 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 19:58:54 crc kubenswrapper[4935]: E1201 19:58:54.580612 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerName="extract-utilities" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.580629 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerName="extract-utilities" Dec 01 19:58:54 crc kubenswrapper[4935]: E1201 19:58:54.580677 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerName="registry-server" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.580685 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerName="registry-server" Dec 01 19:58:54 crc kubenswrapper[4935]: E1201 19:58:54.580704 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerName="extract-content" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.580711 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerName="extract-content" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.581012 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="879e2254-29c2-4e0d-bb9c-59d3d4febdd4" containerName="registry-server" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.582085 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.585122 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.585120 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.589246 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-hh695" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.589262 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.596267 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.688610 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cm5m\" (UniqueName: \"kubernetes.io/projected/ed5fb4cf-a415-4429-af67-924e3f70cb3d-kube-api-access-6cm5m\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.688681 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.688866 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.688914 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.689197 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.689289 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-config-data\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.689329 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.689359 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.689517 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.791906 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.791980 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-config-data\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.792011 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.792822 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.793020 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.793182 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.793359 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-config-data\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc 
kubenswrapper[4935]: I1201 19:58:54.793482 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cm5m\" (UniqueName: \"kubernetes.io/projected/ed5fb4cf-a415-4429-af67-924e3f70cb3d-kube-api-access-6cm5m\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.793529 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.793641 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.793692 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.794122 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.794782 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.795656 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.803594 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.804094 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.805582 4935 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.832240 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cm5m\" (UniqueName: \"kubernetes.io/projected/ed5fb4cf-a415-4429-af67-924e3f70cb3d-kube-api-access-6cm5m\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.844411 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " pod="openstack/tempest-tests-tempest" Dec 01 19:58:54 crc kubenswrapper[4935]: I1201 19:58:54.915624 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 19:58:55 crc kubenswrapper[4935]: I1201 19:58:55.444087 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 19:58:56 crc kubenswrapper[4935]: I1201 19:58:56.655624 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"ed5fb4cf-a415-4429-af67-924e3f70cb3d","Type":"ContainerStarted","Data":"83ba65ceb49581f83d43009e8dbe04203cccb103cd6678fb7421499093f4fe96"} Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578584 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.446145092s: [/var/lib/containers/storage/overlay/1d0979fdba140eb220640b4e7a88ba2cafaf65b154909fb83a84de27ceeb7234/diff /var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-82j75_c5fb0811-5cb8-4bff-927c-99f4e08b8ae0/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578579 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.454016264s: [/var/lib/containers/storage/overlay/8d61db3fbcdcc98d14c2ac3fdb9cf2b076f69cf806717608bd103e5f40b32f55/diff /var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-w2v9w_8a1ded04-5c24-467c-a51b-c0cfbe67ba4b/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578601 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 1.895501719s: [/var/lib/containers/storage/overlay/bc74ca0fd4d808ffd3fbc86392f81d32e4de1d02d3ac805f82907df4938bc4c2/diff /var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-7ppgb_b5460053-e8df-4350-a4a4-ff44683d9f60/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578609 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.452518008s: [/var/lib/containers/storage/overlay/98b30513ea28e5397319e7240b080cf009c6ad3715cb34ab06064ab2e7fb23c4/diff /var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw_fef2b5dc-a162-4b91-ada5-f6af85d8fe20/manager/0.log]; will not log again for this container 
unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578609 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 1.861986913s: [/var/lib/containers/storage/overlay/3a73f57ebf2395539fb2d2aebf38ec4fc1470c5b87b1aae9a0ed3bec691e5908/diff /var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-25tbb_114bfc93-038f-416e-8a85-f1697387b2e2/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578639 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.178625104s: [/var/lib/containers/storage/overlay/3a5defe7d280a64ee83c411816a3b4d5517c9a7197ce3a25a4718b820739b477/diff /var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-mhqkw_e553c27c-e8f0-4617-a914-46c8b5cfc33b/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578710 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 1.844958561s: [/var/lib/containers/storage/overlay/183b75d41864ff417fc06848fb2e34a3a100eabf63d7dfd8a033f1b9ec7c8e4a/diff /var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-4crq6_a66fb641-eb39-4326-a4cb-d4e006a57436/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578720 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.439122588s: [/var/lib/containers/storage/overlay/4f379d9d00fdb1b1488eeafb003d0e6f719bc7974de40e0c62c45446e40db036/diff /var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-wmr2q_0fa3cc8f-0a56-4bec-8afb-3fb3599fb222/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578750 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.069824634s: [/var/lib/containers/storage/overlay/41877a3a5470585636f3fc0b9d368828b53260de1b88bce90e0b6dc0d1ca4271/diff /var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-slbss_d834cbf1-7527-4530-94ca-a0188780da7d/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578827 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 1.814741817s: [/var/lib/containers/storage/overlay/075fc98c5cfb7412beb2eb667caf49a2a104d8b4bfa1df18ae51853cc1ccd336/diff /var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-vtzkd_d0336662-89bd-415e-8c22-2b05bf5dbf9f/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578888 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.45552298s: [/var/lib/containers/storage/overlay/e5612dfefc05a620ac70f91424ade794efea705e912ae22ed7bcc2921d98ce31/diff /var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kg6tq_50e604ea-ddfe-470b-bbbf-b65a5948d9d7/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578876 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.452134056s: 
[/var/lib/containers/storage/overlay/2eae1866d974d5164586cd36806c49648b13a0258930f4e62e723f3db8a244af/diff /var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-vk8hm_8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578948 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.425869302s: [/var/lib/containers/storage/overlay/070843d5d4ddfb5d4c258783c6fc00a1a2ad0e1333667ab56821f89389b1c64d/diff /var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-wj722_07d2f6f9-58fc-4c36-b2b8-ce0c48424c28/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.578985 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.452309932s: [/var/lib/containers/storage/overlay/b8aefdd9aa9170814a9b134289f02137a6647595a25309c4209975c3d21b606c/diff /var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-zx4xc_671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.579046 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.088489106s: [/var/lib/containers/storage/overlay/99385c342581f3a318c3b76654165ccffb973cb1f18ca1f63532a975b2eee8a8/diff /var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-8xpwk_61654c85-dd73-48d3-9931-1ce7095e4f07/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.579174 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.384545677s: [/var/lib/containers/storage/overlay/f3cf15c6ac6e5b42c16d9736dc72b688d96121ed560f84e86f5cf4515407ba67/diff /var/log/pods/openstack-operators_telemetry-operator-controller-manager-7445b68fd8-4tjzb_6d09f2a0-653e-417a-8fee-53935bc27816/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:33 crc kubenswrapper[4935]: I1201 19:59:33.579369 4935 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 2.298847965s: [/var/lib/containers/storage/overlay/ee6a67376f94c10c94e4761a342d4e03734849e8efc8843bbef69993d7b8404e/diff /var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-j7bwb_e3aa8650-ce39-4eb2-8cae-eb012347abb6/kube-rbac-proxy/0.log]; will not log again for this container unless duration exceeds 2s Dec 01 19:59:42 crc kubenswrapper[4935]: E1201 19:59:42.821360 4935 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 01 19:59:42 crc kubenswrapper[4935]: E1201 19:59:42.824133 4935 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6cm5m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(ed5fb4cf-a415-4429-af67-924e3f70cb3d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:59:42 crc kubenswrapper[4935]: E1201 19:59:42.825512 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="ed5fb4cf-a415-4429-af67-924e3f70cb3d" Dec 01 19:59:43 crc kubenswrapper[4935]: E1201 19:59:43.298064 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="ed5fb4cf-a415-4429-af67-924e3f70cb3d" Dec 01 19:59:57 crc kubenswrapper[4935]: I1201 19:59:57.187425 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 01 19:59:58 crc kubenswrapper[4935]: I1201 19:59:58.480324 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"ed5fb4cf-a415-4429-af67-924e3f70cb3d","Type":"ContainerStarted","Data":"1179400c9ecdbfed9dc0eb3c0bf9a4658797dbb22eaf862ac636c2e1e63cf728"} Dec 01 19:59:59 crc kubenswrapper[4935]: I1201 19:59:59.526697 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=5.453957855 podStartE2EDuration="1m6.526676381s" podCreationTimestamp="2025-12-01 19:58:53 +0000 UTC" firstStartedPulling="2025-12-01 19:58:56.111096932 +0000 UTC m=+5350.132726191" lastFinishedPulling="2025-12-01 19:59:57.183815438 +0000 UTC m=+5411.205444717" observedRunningTime="2025-12-01 19:59:59.515847788 +0000 UTC m=+5413.537477067" watchObservedRunningTime="2025-12-01 19:59:59.526676381 +0000 UTC m=+5413.548305640" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.155938 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c"] Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.158294 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.160175 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.160950 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.173720 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c"] Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.296479 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-secret-volume\") pod \"collect-profiles-29410320-wrm4c\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.296546 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-config-volume\") pod \"collect-profiles-29410320-wrm4c\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.296627 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmvgk\" (UniqueName: \"kubernetes.io/projected/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-kube-api-access-nmvgk\") pod \"collect-profiles-29410320-wrm4c\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.399751 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-secret-volume\") pod \"collect-profiles-29410320-wrm4c\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.399853 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-config-volume\") pod \"collect-profiles-29410320-wrm4c\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.399960 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmvgk\" (UniqueName: \"kubernetes.io/projected/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-kube-api-access-nmvgk\") pod \"collect-profiles-29410320-wrm4c\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.400935 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-config-volume\") pod 
\"collect-profiles-29410320-wrm4c\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.415827 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-secret-volume\") pod \"collect-profiles-29410320-wrm4c\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.421700 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmvgk\" (UniqueName: \"kubernetes.io/projected/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-kube-api-access-nmvgk\") pod \"collect-profiles-29410320-wrm4c\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:00 crc kubenswrapper[4935]: I1201 20:00:00.499196 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:01 crc kubenswrapper[4935]: I1201 20:00:01.094526 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c"] Dec 01 20:00:01 crc kubenswrapper[4935]: I1201 20:00:01.524325 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" event={"ID":"2820a9b7-75d3-4a0b-bdc4-e6179adbea13","Type":"ContainerStarted","Data":"2e73e610c4cc0d0791acf68c7d9b3664026087f106e7d5c105b08e0eef29cf65"} Dec 01 20:00:01 crc kubenswrapper[4935]: I1201 20:00:01.524600 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" event={"ID":"2820a9b7-75d3-4a0b-bdc4-e6179adbea13","Type":"ContainerStarted","Data":"72f90d67ea57b4e3c76a1c44e17b9d045bcfa1820b8e9e11d248616469ece4d7"} Dec 01 20:00:01 crc kubenswrapper[4935]: I1201 20:00:01.542500 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" podStartSLOduration=1.542481151 podStartE2EDuration="1.542481151s" podCreationTimestamp="2025-12-01 20:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:00:01.53885379 +0000 UTC m=+5415.560483049" watchObservedRunningTime="2025-12-01 20:00:01.542481151 +0000 UTC m=+5415.564110400" Dec 01 20:00:02 crc kubenswrapper[4935]: I1201 20:00:02.536768 4935 generic.go:334] "Generic (PLEG): container finished" podID="2820a9b7-75d3-4a0b-bdc4-e6179adbea13" containerID="2e73e610c4cc0d0791acf68c7d9b3664026087f106e7d5c105b08e0eef29cf65" exitCode=0 Dec 01 20:00:02 crc kubenswrapper[4935]: I1201 20:00:02.536811 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" event={"ID":"2820a9b7-75d3-4a0b-bdc4-e6179adbea13","Type":"ContainerDied","Data":"2e73e610c4cc0d0791acf68c7d9b3664026087f106e7d5c105b08e0eef29cf65"} Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.053640 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.193948 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-secret-volume\") pod \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.194306 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-config-volume\") pod \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.194469 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmvgk\" (UniqueName: \"kubernetes.io/projected/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-kube-api-access-nmvgk\") pod \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\" (UID: \"2820a9b7-75d3-4a0b-bdc4-e6179adbea13\") " Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.195054 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-config-volume" (OuterVolumeSpecName: "config-volume") pod "2820a9b7-75d3-4a0b-bdc4-e6179adbea13" (UID: "2820a9b7-75d3-4a0b-bdc4-e6179adbea13"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.200666 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2820a9b7-75d3-4a0b-bdc4-e6179adbea13" (UID: "2820a9b7-75d3-4a0b-bdc4-e6179adbea13"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.201451 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-kube-api-access-nmvgk" (OuterVolumeSpecName: "kube-api-access-nmvgk") pod "2820a9b7-75d3-4a0b-bdc4-e6179adbea13" (UID: "2820a9b7-75d3-4a0b-bdc4-e6179adbea13"). InnerVolumeSpecName "kube-api-access-nmvgk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.297547 4935 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.297588 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmvgk\" (UniqueName: \"kubernetes.io/projected/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-kube-api-access-nmvgk\") on node \"crc\" DevicePath \"\"" Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.297599 4935 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2820a9b7-75d3-4a0b-bdc4-e6179adbea13-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.567423 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" event={"ID":"2820a9b7-75d3-4a0b-bdc4-e6179adbea13","Type":"ContainerDied","Data":"72f90d67ea57b4e3c76a1c44e17b9d045bcfa1820b8e9e11d248616469ece4d7"} Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.567467 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72f90d67ea57b4e3c76a1c44e17b9d045bcfa1820b8e9e11d248616469ece4d7" Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.567469 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-wrm4c" Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.624431 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb"] Dec 01 20:00:04 crc kubenswrapper[4935]: I1201 20:00:04.635895 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410275-lzcjb"] Dec 01 20:00:06 crc kubenswrapper[4935]: I1201 20:00:06.524610 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6011b2ab-6f11-47bf-98ef-dabde15ec278" path="/var/lib/kubelet/pods/6011b2ab-6f11-47bf-98ef-dabde15ec278/volumes" Dec 01 20:00:12 crc kubenswrapper[4935]: I1201 20:00:12.304702 4935 scope.go:117] "RemoveContainer" containerID="41b9560bfd51da66c9b62dc3530eb876f12181193b636a2f6ad73e5acbcbafd6" Dec 01 20:00:24 crc kubenswrapper[4935]: I1201 20:00:24.346552 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:00:24 crc kubenswrapper[4935]: I1201 20:00:24.347188 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:00:54 crc kubenswrapper[4935]: I1201 20:00:54.346709 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 01 20:00:54 crc kubenswrapper[4935]: I1201 20:00:54.347324 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.175638 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29410321-6wpck"] Dec 01 20:01:00 crc kubenswrapper[4935]: E1201 20:01:00.177256 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2820a9b7-75d3-4a0b-bdc4-e6179adbea13" containerName="collect-profiles" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.177274 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="2820a9b7-75d3-4a0b-bdc4-e6179adbea13" containerName="collect-profiles" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.177593 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="2820a9b7-75d3-4a0b-bdc4-e6179adbea13" containerName="collect-profiles" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.178780 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.229087 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29410321-6wpck"] Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.320880 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-fernet-keys\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.321186 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-config-data\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.321351 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-combined-ca-bundle\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.321443 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jmtk\" (UniqueName: \"kubernetes.io/projected/387582b3-2696-4540-9866-138a6ea4394e-kube-api-access-4jmtk\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.423811 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-config-data\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc 
kubenswrapper[4935]: I1201 20:01:00.423887 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-combined-ca-bundle\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.423968 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jmtk\" (UniqueName: \"kubernetes.io/projected/387582b3-2696-4540-9866-138a6ea4394e-kube-api-access-4jmtk\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.424066 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-fernet-keys\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.442082 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-config-data\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.442537 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-combined-ca-bundle\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.443363 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-fernet-keys\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.443867 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jmtk\" (UniqueName: \"kubernetes.io/projected/387582b3-2696-4540-9866-138a6ea4394e-kube-api-access-4jmtk\") pod \"keystone-cron-29410321-6wpck\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:00 crc kubenswrapper[4935]: I1201 20:01:00.502479 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:01 crc kubenswrapper[4935]: I1201 20:01:01.316292 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29410321-6wpck"] Dec 01 20:01:02 crc kubenswrapper[4935]: I1201 20:01:02.292521 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410321-6wpck" event={"ID":"387582b3-2696-4540-9866-138a6ea4394e","Type":"ContainerStarted","Data":"fe4ce9cfd9eccc5f187c1caa0c1dc80f2c7479ebae68ed6cb071abab1e6b2fc2"} Dec 01 20:01:02 crc kubenswrapper[4935]: I1201 20:01:02.293596 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410321-6wpck" event={"ID":"387582b3-2696-4540-9866-138a6ea4394e","Type":"ContainerStarted","Data":"60f1be3587ecfa9659f5714212b2b45365004816cc08129f4607c42506ac90f7"} Dec 01 20:01:02 crc kubenswrapper[4935]: I1201 20:01:02.321221 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29410321-6wpck" podStartSLOduration=2.321187358 podStartE2EDuration="2.321187358s" podCreationTimestamp="2025-12-01 20:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:01:02.308988385 +0000 UTC m=+5476.330617644" watchObservedRunningTime="2025-12-01 20:01:02.321187358 +0000 UTC m=+5476.342816617" Dec 01 20:01:04 crc kubenswrapper[4935]: I1201 20:01:04.320309 4935 generic.go:334] "Generic (PLEG): container finished" podID="387582b3-2696-4540-9866-138a6ea4394e" containerID="fe4ce9cfd9eccc5f187c1caa0c1dc80f2c7479ebae68ed6cb071abab1e6b2fc2" exitCode=0 Dec 01 20:01:04 crc kubenswrapper[4935]: I1201 20:01:04.320564 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410321-6wpck" event={"ID":"387582b3-2696-4540-9866-138a6ea4394e","Type":"ContainerDied","Data":"fe4ce9cfd9eccc5f187c1caa0c1dc80f2c7479ebae68ed6cb071abab1e6b2fc2"} Dec 01 20:01:05 crc kubenswrapper[4935]: I1201 20:01:05.836803 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:05 crc kubenswrapper[4935]: I1201 20:01:05.981097 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-combined-ca-bundle\") pod \"387582b3-2696-4540-9866-138a6ea4394e\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " Dec 01 20:01:05 crc kubenswrapper[4935]: I1201 20:01:05.981339 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jmtk\" (UniqueName: \"kubernetes.io/projected/387582b3-2696-4540-9866-138a6ea4394e-kube-api-access-4jmtk\") pod \"387582b3-2696-4540-9866-138a6ea4394e\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " Dec 01 20:01:05 crc kubenswrapper[4935]: I1201 20:01:05.981443 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-fernet-keys\") pod \"387582b3-2696-4540-9866-138a6ea4394e\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " Dec 01 20:01:05 crc kubenswrapper[4935]: I1201 20:01:05.981550 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-config-data\") pod \"387582b3-2696-4540-9866-138a6ea4394e\" (UID: \"387582b3-2696-4540-9866-138a6ea4394e\") " Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.000914 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "387582b3-2696-4540-9866-138a6ea4394e" (UID: "387582b3-2696-4540-9866-138a6ea4394e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.001008 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/387582b3-2696-4540-9866-138a6ea4394e-kube-api-access-4jmtk" (OuterVolumeSpecName: "kube-api-access-4jmtk") pod "387582b3-2696-4540-9866-138a6ea4394e" (UID: "387582b3-2696-4540-9866-138a6ea4394e"). InnerVolumeSpecName "kube-api-access-4jmtk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.031505 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "387582b3-2696-4540-9866-138a6ea4394e" (UID: "387582b3-2696-4540-9866-138a6ea4394e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.077990 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-config-data" (OuterVolumeSpecName: "config-data") pod "387582b3-2696-4540-9866-138a6ea4394e" (UID: "387582b3-2696-4540-9866-138a6ea4394e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.084957 4935 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.085009 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jmtk\" (UniqueName: \"kubernetes.io/projected/387582b3-2696-4540-9866-138a6ea4394e-kube-api-access-4jmtk\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.085026 4935 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.085040 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/387582b3-2696-4540-9866-138a6ea4394e-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.354747 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410321-6wpck" event={"ID":"387582b3-2696-4540-9866-138a6ea4394e","Type":"ContainerDied","Data":"60f1be3587ecfa9659f5714212b2b45365004816cc08129f4607c42506ac90f7"} Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.355385 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60f1be3587ecfa9659f5714212b2b45365004816cc08129f4607c42506ac90f7" Dec 01 20:01:06 crc kubenswrapper[4935]: I1201 20:01:06.354795 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410321-6wpck" Dec 01 20:01:24 crc kubenswrapper[4935]: I1201 20:01:24.345864 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:01:24 crc kubenswrapper[4935]: I1201 20:01:24.346460 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:01:24 crc kubenswrapper[4935]: I1201 20:01:24.346509 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 20:01:24 crc kubenswrapper[4935]: I1201 20:01:24.347493 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6b82e4de433a4b2d1c72ed1d6c320adde8e78c24624f68417ba5f43831f3c3cf"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:01:24 crc kubenswrapper[4935]: I1201 20:01:24.348236 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" 
containerID="cri-o://6b82e4de433a4b2d1c72ed1d6c320adde8e78c24624f68417ba5f43831f3c3cf" gracePeriod=600 Dec 01 20:01:24 crc kubenswrapper[4935]: I1201 20:01:24.600473 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="6b82e4de433a4b2d1c72ed1d6c320adde8e78c24624f68417ba5f43831f3c3cf" exitCode=0 Dec 01 20:01:24 crc kubenswrapper[4935]: I1201 20:01:24.600516 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"6b82e4de433a4b2d1c72ed1d6c320adde8e78c24624f68417ba5f43831f3c3cf"} Dec 01 20:01:24 crc kubenswrapper[4935]: I1201 20:01:24.600826 4935 scope.go:117] "RemoveContainer" containerID="6b718c8b7d6c9ee33b8e2a94f3d828f849f73265a13c7bfec564a57eb9a94185" Dec 01 20:01:25 crc kubenswrapper[4935]: I1201 20:01:25.614541 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda"} Dec 01 20:03:24 crc kubenswrapper[4935]: I1201 20:03:24.346981 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:03:24 crc kubenswrapper[4935]: I1201 20:03:24.349293 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.110428 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k2bsc"] Dec 01 20:03:34 crc kubenswrapper[4935]: E1201 20:03:34.112582 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="387582b3-2696-4540-9866-138a6ea4394e" containerName="keystone-cron" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.112826 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="387582b3-2696-4540-9866-138a6ea4394e" containerName="keystone-cron" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.113538 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="387582b3-2696-4540-9866-138a6ea4394e" containerName="keystone-cron" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.117022 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.131226 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k2bsc"] Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.241962 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-catalog-content\") pod \"certified-operators-k2bsc\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.242137 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqqlp\" (UniqueName: \"kubernetes.io/projected/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-kube-api-access-wqqlp\") pod \"certified-operators-k2bsc\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.242804 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-utilities\") pod \"certified-operators-k2bsc\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.344456 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-catalog-content\") pod \"certified-operators-k2bsc\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.344537 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqqlp\" (UniqueName: \"kubernetes.io/projected/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-kube-api-access-wqqlp\") pod \"certified-operators-k2bsc\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.344737 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-utilities\") pod \"certified-operators-k2bsc\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.346836 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-catalog-content\") pod \"certified-operators-k2bsc\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.347281 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-utilities\") pod \"certified-operators-k2bsc\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.384666 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wqqlp\" (UniqueName: \"kubernetes.io/projected/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-kube-api-access-wqqlp\") pod \"certified-operators-k2bsc\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:34 crc kubenswrapper[4935]: I1201 20:03:34.462233 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:35 crc kubenswrapper[4935]: I1201 20:03:35.057863 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k2bsc"] Dec 01 20:03:35 crc kubenswrapper[4935]: I1201 20:03:35.306439 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bsc" event={"ID":"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1","Type":"ContainerStarted","Data":"e8de26dfda0b7bf91881e30f7df12a202cd846a6486ed88a1a81f05ca9e3f2b0"} Dec 01 20:03:36 crc kubenswrapper[4935]: I1201 20:03:36.319551 4935 generic.go:334] "Generic (PLEG): container finished" podID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerID="a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa" exitCode=0 Dec 01 20:03:36 crc kubenswrapper[4935]: I1201 20:03:36.319686 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bsc" event={"ID":"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1","Type":"ContainerDied","Data":"a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa"} Dec 01 20:03:36 crc kubenswrapper[4935]: I1201 20:03:36.324737 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:03:37 crc kubenswrapper[4935]: I1201 20:03:37.334706 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bsc" event={"ID":"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1","Type":"ContainerStarted","Data":"d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b"} Dec 01 20:03:38 crc kubenswrapper[4935]: I1201 20:03:38.352932 4935 generic.go:334] "Generic (PLEG): container finished" podID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerID="d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b" exitCode=0 Dec 01 20:03:38 crc kubenswrapper[4935]: I1201 20:03:38.353062 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bsc" event={"ID":"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1","Type":"ContainerDied","Data":"d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b"} Dec 01 20:03:39 crc kubenswrapper[4935]: I1201 20:03:39.370779 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bsc" event={"ID":"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1","Type":"ContainerStarted","Data":"cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88"} Dec 01 20:03:39 crc kubenswrapper[4935]: I1201 20:03:39.404376 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k2bsc" podStartSLOduration=2.706282249 podStartE2EDuration="5.404338883s" podCreationTimestamp="2025-12-01 20:03:34 +0000 UTC" firstStartedPulling="2025-12-01 20:03:36.322961166 +0000 UTC m=+5630.344590435" lastFinishedPulling="2025-12-01 20:03:39.02101781 +0000 UTC m=+5633.042647069" observedRunningTime="2025-12-01 20:03:39.396311538 +0000 UTC m=+5633.417940807" watchObservedRunningTime="2025-12-01 
20:03:39.404338883 +0000 UTC m=+5633.425968142" Dec 01 20:03:44 crc kubenswrapper[4935]: I1201 20:03:44.463952 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:44 crc kubenswrapper[4935]: I1201 20:03:44.464781 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:44 crc kubenswrapper[4935]: I1201 20:03:44.553798 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:45 crc kubenswrapper[4935]: I1201 20:03:45.515336 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:45 crc kubenswrapper[4935]: I1201 20:03:45.584076 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k2bsc"] Dec 01 20:03:47 crc kubenswrapper[4935]: I1201 20:03:47.484755 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k2bsc" podUID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerName="registry-server" containerID="cri-o://cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88" gracePeriod=2 Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.167622 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.235819 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqqlp\" (UniqueName: \"kubernetes.io/projected/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-kube-api-access-wqqlp\") pod \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.236191 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-catalog-content\") pod \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.236362 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-utilities\") pod \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\" (UID: \"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1\") " Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.237895 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-utilities" (OuterVolumeSpecName: "utilities") pod "fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" (UID: "fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.260951 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-kube-api-access-wqqlp" (OuterVolumeSpecName: "kube-api-access-wqqlp") pod "fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" (UID: "fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1"). InnerVolumeSpecName "kube-api-access-wqqlp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.322436 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" (UID: "fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.339462 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.339506 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.339518 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqqlp\" (UniqueName: \"kubernetes.io/projected/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1-kube-api-access-wqqlp\") on node \"crc\" DevicePath \"\"" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.499452 4935 generic.go:334] "Generic (PLEG): container finished" podID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerID="cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88" exitCode=0 Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.499553 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bsc" event={"ID":"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1","Type":"ContainerDied","Data":"cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88"} Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.501108 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k2bsc" event={"ID":"fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1","Type":"ContainerDied","Data":"e8de26dfda0b7bf91881e30f7df12a202cd846a6486ed88a1a81f05ca9e3f2b0"} Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.501176 4935 scope.go:117] "RemoveContainer" containerID="cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.499593 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k2bsc" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.538361 4935 scope.go:117] "RemoveContainer" containerID="d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.561259 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k2bsc"] Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.573214 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k2bsc"] Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.576417 4935 scope.go:117] "RemoveContainer" containerID="a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.628855 4935 scope.go:117] "RemoveContainer" containerID="cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88" Dec 01 20:03:48 crc kubenswrapper[4935]: E1201 20:03:48.630628 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88\": container with ID starting with cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88 not found: ID does not exist" containerID="cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.630666 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88"} err="failed to get container status \"cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88\": rpc error: code = NotFound desc = could not find container \"cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88\": container with ID starting with cf7dda70ac711e62dc7368ab0e5797c1b2372631fdb17da85d048c32d9da7b88 not found: ID does not exist" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.630687 4935 scope.go:117] "RemoveContainer" containerID="d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b" Dec 01 20:03:48 crc kubenswrapper[4935]: E1201 20:03:48.631043 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b\": container with ID starting with d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b not found: ID does not exist" containerID="d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.631067 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b"} err="failed to get container status \"d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b\": rpc error: code = NotFound desc = could not find container \"d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b\": container with ID starting with d94b0550b7a8bed737b11c515dbff593328f7a8186ec2291923491514472a85b not found: ID does not exist" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.631083 4935 scope.go:117] "RemoveContainer" containerID="a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa" Dec 01 20:03:48 crc kubenswrapper[4935]: E1201 20:03:48.631467 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa\": container with ID starting with a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa not found: ID does not exist" containerID="a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa" Dec 01 20:03:48 crc kubenswrapper[4935]: I1201 20:03:48.631527 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa"} err="failed to get container status \"a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa\": rpc error: code = NotFound desc = could not find container \"a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa\": container with ID starting with a2d256e1727bdae02ed0a75879096ca21a1c2a25a6a3958924b184f69feaeeaa not found: ID does not exist" Dec 01 20:03:50 crc kubenswrapper[4935]: I1201 20:03:50.523490 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" path="/var/lib/kubelet/pods/fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1/volumes" Dec 01 20:03:54 crc kubenswrapper[4935]: I1201 20:03:54.346107 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:03:54 crc kubenswrapper[4935]: I1201 20:03:54.346764 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:04:24 crc kubenswrapper[4935]: I1201 20:04:24.346486 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:04:24 crc kubenswrapper[4935]: I1201 20:04:24.347249 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:04:24 crc kubenswrapper[4935]: I1201 20:04:24.347319 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 20:04:24 crc kubenswrapper[4935]: I1201 20:04:24.348547 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:04:24 crc kubenswrapper[4935]: I1201 20:04:24.348637 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" 
podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" gracePeriod=600 Dec 01 20:04:24 crc kubenswrapper[4935]: E1201 20:04:24.995343 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:04:25 crc kubenswrapper[4935]: I1201 20:04:25.017368 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" exitCode=0 Dec 01 20:04:25 crc kubenswrapper[4935]: I1201 20:04:25.017418 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda"} Dec 01 20:04:25 crc kubenswrapper[4935]: I1201 20:04:25.017475 4935 scope.go:117] "RemoveContainer" containerID="6b82e4de433a4b2d1c72ed1d6c320adde8e78c24624f68417ba5f43831f3c3cf" Dec 01 20:04:25 crc kubenswrapper[4935]: I1201 20:04:25.018481 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:04:25 crc kubenswrapper[4935]: E1201 20:04:25.018880 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:04:35 crc kubenswrapper[4935]: I1201 20:04:35.509603 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:04:35 crc kubenswrapper[4935]: E1201 20:04:35.510629 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:04:46 crc kubenswrapper[4935]: I1201 20:04:46.519754 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:04:46 crc kubenswrapper[4935]: E1201 20:04:46.520697 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:05:01 crc kubenswrapper[4935]: I1201 20:05:01.509578 4935 scope.go:117] 
"RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:05:01 crc kubenswrapper[4935]: E1201 20:05:01.511209 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:05:13 crc kubenswrapper[4935]: I1201 20:05:13.509355 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:05:13 crc kubenswrapper[4935]: E1201 20:05:13.510400 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:05:26 crc kubenswrapper[4935]: I1201 20:05:26.522486 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:05:26 crc kubenswrapper[4935]: E1201 20:05:26.523559 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:05:26 crc kubenswrapper[4935]: I1201 20:05:26.946878 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cjjcx"] Dec 01 20:05:26 crc kubenswrapper[4935]: E1201 20:05:26.947760 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerName="extract-content" Dec 01 20:05:26 crc kubenswrapper[4935]: I1201 20:05:26.947788 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerName="extract-content" Dec 01 20:05:26 crc kubenswrapper[4935]: E1201 20:05:26.947839 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerName="registry-server" Dec 01 20:05:26 crc kubenswrapper[4935]: I1201 20:05:26.947848 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerName="registry-server" Dec 01 20:05:26 crc kubenswrapper[4935]: E1201 20:05:26.947866 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerName="extract-utilities" Dec 01 20:05:26 crc kubenswrapper[4935]: I1201 20:05:26.947874 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerName="extract-utilities" Dec 01 20:05:26 crc kubenswrapper[4935]: I1201 20:05:26.948140 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb293315-d620-4c7a-b0f2-c3d2a9a6c4f1" containerName="registry-server" Dec 01 20:05:26 crc kubenswrapper[4935]: I1201 
20:05:26.950264 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:26 crc kubenswrapper[4935]: I1201 20:05:26.970624 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjjcx"] Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.072876 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-utilities\") pod \"redhat-marketplace-cjjcx\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.073018 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-catalog-content\") pod \"redhat-marketplace-cjjcx\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.073053 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp824\" (UniqueName: \"kubernetes.io/projected/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-kube-api-access-dp824\") pod \"redhat-marketplace-cjjcx\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.175123 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-utilities\") pod \"redhat-marketplace-cjjcx\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.175438 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-catalog-content\") pod \"redhat-marketplace-cjjcx\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.175460 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp824\" (UniqueName: \"kubernetes.io/projected/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-kube-api-access-dp824\") pod \"redhat-marketplace-cjjcx\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.175568 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-utilities\") pod \"redhat-marketplace-cjjcx\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.175785 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-catalog-content\") pod \"redhat-marketplace-cjjcx\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 
20:05:27.196625 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp824\" (UniqueName: \"kubernetes.io/projected/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-kube-api-access-dp824\") pod \"redhat-marketplace-cjjcx\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.281231 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:27 crc kubenswrapper[4935]: I1201 20:05:27.821324 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjjcx"] Dec 01 20:05:28 crc kubenswrapper[4935]: I1201 20:05:28.839357 4935 generic.go:334] "Generic (PLEG): container finished" podID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerID="d631975d2a595e1ddf4348c118e7ec3a1610f8c0e610e924e346e60130d776c1" exitCode=0 Dec 01 20:05:28 crc kubenswrapper[4935]: I1201 20:05:28.839543 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjjcx" event={"ID":"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1","Type":"ContainerDied","Data":"d631975d2a595e1ddf4348c118e7ec3a1610f8c0e610e924e346e60130d776c1"} Dec 01 20:05:28 crc kubenswrapper[4935]: I1201 20:05:28.839896 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjjcx" event={"ID":"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1","Type":"ContainerStarted","Data":"4a447c5a30985c854eb2ad73537c216627c584a7620f2d4aa30a528eb10fff7f"} Dec 01 20:05:30 crc kubenswrapper[4935]: I1201 20:05:30.865492 4935 generic.go:334] "Generic (PLEG): container finished" podID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerID="97d8931a287c9b66398549e08e093b8832b62e9875f85ffe400256a5ef32639d" exitCode=0 Dec 01 20:05:30 crc kubenswrapper[4935]: I1201 20:05:30.865606 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjjcx" event={"ID":"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1","Type":"ContainerDied","Data":"97d8931a287c9b66398549e08e093b8832b62e9875f85ffe400256a5ef32639d"} Dec 01 20:05:32 crc kubenswrapper[4935]: I1201 20:05:32.894195 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjjcx" event={"ID":"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1","Type":"ContainerStarted","Data":"313e7d50579177fd71fef6195d28eac6fd594a27e15a2ee5da9dc19497775475"} Dec 01 20:05:32 crc kubenswrapper[4935]: I1201 20:05:32.914375 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cjjcx" podStartSLOduration=3.5954632330000003 podStartE2EDuration="6.914357367s" podCreationTimestamp="2025-12-01 20:05:26 +0000 UTC" firstStartedPulling="2025-12-01 20:05:28.841972529 +0000 UTC m=+5742.863601808" lastFinishedPulling="2025-12-01 20:05:32.160866673 +0000 UTC m=+5746.182495942" observedRunningTime="2025-12-01 20:05:32.911078686 +0000 UTC m=+5746.932707955" watchObservedRunningTime="2025-12-01 20:05:32.914357367 +0000 UTC m=+5746.935986626" Dec 01 20:05:37 crc kubenswrapper[4935]: I1201 20:05:37.282275 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:37 crc kubenswrapper[4935]: I1201 20:05:37.282990 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:37 crc 
kubenswrapper[4935]: I1201 20:05:37.333581 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:38 crc kubenswrapper[4935]: I1201 20:05:38.047696 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:38 crc kubenswrapper[4935]: I1201 20:05:38.106082 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjjcx"] Dec 01 20:05:38 crc kubenswrapper[4935]: I1201 20:05:38.507898 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:05:38 crc kubenswrapper[4935]: E1201 20:05:38.508316 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:05:39 crc kubenswrapper[4935]: I1201 20:05:39.987433 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cjjcx" podUID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerName="registry-server" containerID="cri-o://313e7d50579177fd71fef6195d28eac6fd594a27e15a2ee5da9dc19497775475" gracePeriod=2 Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.004270 4935 generic.go:334] "Generic (PLEG): container finished" podID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerID="313e7d50579177fd71fef6195d28eac6fd594a27e15a2ee5da9dc19497775475" exitCode=0 Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.004367 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjjcx" event={"ID":"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1","Type":"ContainerDied","Data":"313e7d50579177fd71fef6195d28eac6fd594a27e15a2ee5da9dc19497775475"} Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.444211 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.566947 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dp824\" (UniqueName: \"kubernetes.io/projected/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-kube-api-access-dp824\") pod \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.566999 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-catalog-content\") pod \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.567112 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-utilities\") pod \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\" (UID: \"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1\") " Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.568149 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-utilities" (OuterVolumeSpecName: "utilities") pod "e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" (UID: "e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.574008 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-kube-api-access-dp824" (OuterVolumeSpecName: "kube-api-access-dp824") pod "e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" (UID: "e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1"). InnerVolumeSpecName "kube-api-access-dp824". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.594398 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" (UID: "e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.670326 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.670387 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dp824\" (UniqueName: \"kubernetes.io/projected/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-kube-api-access-dp824\") on node \"crc\" DevicePath \"\"" Dec 01 20:05:41 crc kubenswrapper[4935]: I1201 20:05:41.670404 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:05:42 crc kubenswrapper[4935]: I1201 20:05:42.017534 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjjcx" event={"ID":"e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1","Type":"ContainerDied","Data":"4a447c5a30985c854eb2ad73537c216627c584a7620f2d4aa30a528eb10fff7f"} Dec 01 20:05:42 crc kubenswrapper[4935]: I1201 20:05:42.017591 4935 scope.go:117] "RemoveContainer" containerID="313e7d50579177fd71fef6195d28eac6fd594a27e15a2ee5da9dc19497775475" Dec 01 20:05:42 crc kubenswrapper[4935]: I1201 20:05:42.017698 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cjjcx" Dec 01 20:05:42 crc kubenswrapper[4935]: I1201 20:05:42.044959 4935 scope.go:117] "RemoveContainer" containerID="97d8931a287c9b66398549e08e093b8832b62e9875f85ffe400256a5ef32639d" Dec 01 20:05:42 crc kubenswrapper[4935]: I1201 20:05:42.058701 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjjcx"] Dec 01 20:05:42 crc kubenswrapper[4935]: I1201 20:05:42.072134 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjjcx"] Dec 01 20:05:42 crc kubenswrapper[4935]: I1201 20:05:42.525253 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" path="/var/lib/kubelet/pods/e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1/volumes" Dec 01 20:05:42 crc kubenswrapper[4935]: I1201 20:05:42.713278 4935 scope.go:117] "RemoveContainer" containerID="d631975d2a595e1ddf4348c118e7ec3a1610f8c0e610e924e346e60130d776c1" Dec 01 20:05:49 crc kubenswrapper[4935]: I1201 20:05:49.508297 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:05:49 crc kubenswrapper[4935]: E1201 20:05:49.510558 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:06:00 crc kubenswrapper[4935]: I1201 20:06:00.509901 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:06:00 crc kubenswrapper[4935]: E1201 20:06:00.510726 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:06:12 crc kubenswrapper[4935]: I1201 20:06:12.508769 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:06:12 crc kubenswrapper[4935]: E1201 20:06:12.509682 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:06:23 crc kubenswrapper[4935]: I1201 20:06:23.509837 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:06:23 crc kubenswrapper[4935]: E1201 20:06:23.510631 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:06:35 crc kubenswrapper[4935]: I1201 20:06:35.510235 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:06:35 crc kubenswrapper[4935]: E1201 20:06:35.512003 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:06:49 crc kubenswrapper[4935]: I1201 20:06:49.508740 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:06:49 crc kubenswrapper[4935]: E1201 20:06:49.509621 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:07:04 crc kubenswrapper[4935]: I1201 20:07:04.508866 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:07:04 crc kubenswrapper[4935]: E1201 20:07:04.510298 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.057233 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b6pqz"] Dec 01 20:07:15 crc kubenswrapper[4935]: E1201 20:07:15.058291 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerName="extract-content" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.058306 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerName="extract-content" Dec 01 20:07:15 crc kubenswrapper[4935]: E1201 20:07:15.058339 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerName="extract-utilities" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.058345 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerName="extract-utilities" Dec 01 20:07:15 crc kubenswrapper[4935]: E1201 20:07:15.058360 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerName="registry-server" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.058367 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerName="registry-server" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.058620 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6c552ea-dc09-48b6-8b84-6fe1d7b5bad1" containerName="registry-server" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.060286 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.082708 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b6pqz"] Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.157514 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-catalog-content\") pod \"redhat-operators-b6pqz\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.158083 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-utilities\") pod \"redhat-operators-b6pqz\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.158115 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqphg\" (UniqueName: \"kubernetes.io/projected/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-kube-api-access-pqphg\") pod \"redhat-operators-b6pqz\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.260316 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-utilities\") pod \"redhat-operators-b6pqz\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.260368 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqphg\" (UniqueName: \"kubernetes.io/projected/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-kube-api-access-pqphg\") pod \"redhat-operators-b6pqz\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.260424 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-catalog-content\") pod \"redhat-operators-b6pqz\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.260945 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-utilities\") pod \"redhat-operators-b6pqz\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.261003 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-catalog-content\") pod \"redhat-operators-b6pqz\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.281696 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-pqphg\" (UniqueName: \"kubernetes.io/projected/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-kube-api-access-pqphg\") pod \"redhat-operators-b6pqz\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.387884 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:15 crc kubenswrapper[4935]: I1201 20:07:15.905558 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b6pqz"] Dec 01 20:07:16 crc kubenswrapper[4935]: I1201 20:07:16.172716 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6pqz" event={"ID":"cc15c87d-8ffe-40e0-a8f9-11ed099e5668","Type":"ContainerStarted","Data":"5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d"} Dec 01 20:07:16 crc kubenswrapper[4935]: I1201 20:07:16.173084 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6pqz" event={"ID":"cc15c87d-8ffe-40e0-a8f9-11ed099e5668","Type":"ContainerStarted","Data":"95f40e2ce3d99f609ddb0efd0d7166e86b54d769a811d8816995b579a7ad814f"} Dec 01 20:07:17 crc kubenswrapper[4935]: I1201 20:07:17.186545 4935 generic.go:334] "Generic (PLEG): container finished" podID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerID="5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d" exitCode=0 Dec 01 20:07:17 crc kubenswrapper[4935]: I1201 20:07:17.186649 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6pqz" event={"ID":"cc15c87d-8ffe-40e0-a8f9-11ed099e5668","Type":"ContainerDied","Data":"5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d"} Dec 01 20:07:19 crc kubenswrapper[4935]: I1201 20:07:19.209223 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6pqz" event={"ID":"cc15c87d-8ffe-40e0-a8f9-11ed099e5668","Type":"ContainerStarted","Data":"c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321"} Dec 01 20:07:19 crc kubenswrapper[4935]: I1201 20:07:19.508945 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:07:19 crc kubenswrapper[4935]: E1201 20:07:19.509729 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:07:23 crc kubenswrapper[4935]: I1201 20:07:23.254339 4935 generic.go:334] "Generic (PLEG): container finished" podID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerID="c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321" exitCode=0 Dec 01 20:07:23 crc kubenswrapper[4935]: I1201 20:07:23.254422 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6pqz" event={"ID":"cc15c87d-8ffe-40e0-a8f9-11ed099e5668","Type":"ContainerDied","Data":"c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321"} Dec 01 20:07:24 crc kubenswrapper[4935]: I1201 20:07:24.266697 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-b6pqz" event={"ID":"cc15c87d-8ffe-40e0-a8f9-11ed099e5668","Type":"ContainerStarted","Data":"ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4"} Dec 01 20:07:24 crc kubenswrapper[4935]: I1201 20:07:24.289971 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b6pqz" podStartSLOduration=2.6641788220000002 podStartE2EDuration="9.289947567s" podCreationTimestamp="2025-12-01 20:07:15 +0000 UTC" firstStartedPulling="2025-12-01 20:07:17.189964967 +0000 UTC m=+5851.211594226" lastFinishedPulling="2025-12-01 20:07:23.815733712 +0000 UTC m=+5857.837362971" observedRunningTime="2025-12-01 20:07:24.284296884 +0000 UTC m=+5858.305926163" watchObservedRunningTime="2025-12-01 20:07:24.289947567 +0000 UTC m=+5858.311576826" Dec 01 20:07:25 crc kubenswrapper[4935]: I1201 20:07:25.388271 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:25 crc kubenswrapper[4935]: I1201 20:07:25.388651 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:26 crc kubenswrapper[4935]: I1201 20:07:26.449470 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b6pqz" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerName="registry-server" probeResult="failure" output=< Dec 01 20:07:26 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 20:07:26 crc kubenswrapper[4935]: > Dec 01 20:07:31 crc kubenswrapper[4935]: I1201 20:07:31.508101 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:07:31 crc kubenswrapper[4935]: E1201 20:07:31.509232 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:07:35 crc kubenswrapper[4935]: I1201 20:07:35.439932 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:35 crc kubenswrapper[4935]: I1201 20:07:35.492223 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:35 crc kubenswrapper[4935]: I1201 20:07:35.684621 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b6pqz"] Dec 01 20:07:37 crc kubenswrapper[4935]: I1201 20:07:37.401562 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b6pqz" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerName="registry-server" containerID="cri-o://ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4" gracePeriod=2 Dec 01 20:07:37 crc kubenswrapper[4935]: E1201 20:07:37.625649 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc15c87d_8ffe_40e0_a8f9_11ed099e5668.slice/crio-ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc15c87d_8ffe_40e0_a8f9_11ed099e5668.slice/crio-conmon-ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4.scope\": RecentStats: unable to find data in memory cache]" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.054912 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.173828 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-utilities\") pod \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.174093 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqphg\" (UniqueName: \"kubernetes.io/projected/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-kube-api-access-pqphg\") pod \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.174297 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-catalog-content\") pod \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\" (UID: \"cc15c87d-8ffe-40e0-a8f9-11ed099e5668\") " Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.174677 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-utilities" (OuterVolumeSpecName: "utilities") pod "cc15c87d-8ffe-40e0-a8f9-11ed099e5668" (UID: "cc15c87d-8ffe-40e0-a8f9-11ed099e5668"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.174995 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.182457 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-kube-api-access-pqphg" (OuterVolumeSpecName: "kube-api-access-pqphg") pod "cc15c87d-8ffe-40e0-a8f9-11ed099e5668" (UID: "cc15c87d-8ffe-40e0-a8f9-11ed099e5668"). InnerVolumeSpecName "kube-api-access-pqphg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.277323 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqphg\" (UniqueName: \"kubernetes.io/projected/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-kube-api-access-pqphg\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.307869 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cc15c87d-8ffe-40e0-a8f9-11ed099e5668" (UID: "cc15c87d-8ffe-40e0-a8f9-11ed099e5668"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.379232 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cc15c87d-8ffe-40e0-a8f9-11ed099e5668-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.413481 4935 generic.go:334] "Generic (PLEG): container finished" podID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerID="ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4" exitCode=0 Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.413532 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6pqz" event={"ID":"cc15c87d-8ffe-40e0-a8f9-11ed099e5668","Type":"ContainerDied","Data":"ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4"} Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.413603 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b6pqz" event={"ID":"cc15c87d-8ffe-40e0-a8f9-11ed099e5668","Type":"ContainerDied","Data":"95f40e2ce3d99f609ddb0efd0d7166e86b54d769a811d8816995b579a7ad814f"} Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.413551 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b6pqz" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.413629 4935 scope.go:117] "RemoveContainer" containerID="ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.454763 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b6pqz"] Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.456525 4935 scope.go:117] "RemoveContainer" containerID="c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321" Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.468047 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b6pqz"] Dec 01 20:07:38 crc kubenswrapper[4935]: I1201 20:07:38.521858 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" path="/var/lib/kubelet/pods/cc15c87d-8ffe-40e0-a8f9-11ed099e5668/volumes" Dec 01 20:07:39 crc kubenswrapper[4935]: I1201 20:07:39.120066 4935 scope.go:117] "RemoveContainer" containerID="5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d" Dec 01 20:07:39 crc kubenswrapper[4935]: I1201 20:07:39.191922 4935 scope.go:117] "RemoveContainer" containerID="ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4" Dec 01 20:07:39 crc kubenswrapper[4935]: E1201 20:07:39.192690 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4\": container with ID starting with ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4 not found: ID does not exist" containerID="ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4" Dec 01 20:07:39 crc kubenswrapper[4935]: I1201 20:07:39.192740 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4"} err="failed to get container status 
\"ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4\": rpc error: code = NotFound desc = could not find container \"ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4\": container with ID starting with ff25b2f64f33cd6c949026fbd4fdf9d1579ac80b21049a37cb921cbaef39d4d4 not found: ID does not exist" Dec 01 20:07:39 crc kubenswrapper[4935]: I1201 20:07:39.192771 4935 scope.go:117] "RemoveContainer" containerID="c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321" Dec 01 20:07:39 crc kubenswrapper[4935]: E1201 20:07:39.193355 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321\": container with ID starting with c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321 not found: ID does not exist" containerID="c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321" Dec 01 20:07:39 crc kubenswrapper[4935]: I1201 20:07:39.193383 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321"} err="failed to get container status \"c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321\": rpc error: code = NotFound desc = could not find container \"c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321\": container with ID starting with c29e4d5dc984f00f58d68b2339183a9131edd12d4f0f86b9d94a9dc84aa6f321 not found: ID does not exist" Dec 01 20:07:39 crc kubenswrapper[4935]: I1201 20:07:39.193403 4935 scope.go:117] "RemoveContainer" containerID="5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d" Dec 01 20:07:39 crc kubenswrapper[4935]: E1201 20:07:39.194058 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d\": container with ID starting with 5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d not found: ID does not exist" containerID="5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d" Dec 01 20:07:39 crc kubenswrapper[4935]: I1201 20:07:39.194096 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d"} err="failed to get container status \"5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d\": rpc error: code = NotFound desc = could not find container \"5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d\": container with ID starting with 5664943591be1458f167b12a3d2a3b8cf2d49dc7be2222e09052c5e643953e2d not found: ID does not exist" Dec 01 20:07:44 crc kubenswrapper[4935]: I1201 20:07:44.508638 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:07:44 crc kubenswrapper[4935]: E1201 20:07:44.509615 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:07:59 crc kubenswrapper[4935]: I1201 20:07:59.508931 4935 scope.go:117] 
"RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:07:59 crc kubenswrapper[4935]: E1201 20:07:59.510122 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:08:14 crc kubenswrapper[4935]: I1201 20:08:14.508912 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:08:14 crc kubenswrapper[4935]: E1201 20:08:14.509836 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:08:28 crc kubenswrapper[4935]: I1201 20:08:28.508628 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:08:28 crc kubenswrapper[4935]: E1201 20:08:28.509490 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:08:39 crc kubenswrapper[4935]: I1201 20:08:39.509439 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:08:39 crc kubenswrapper[4935]: E1201 20:08:39.510414 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:08:54 crc kubenswrapper[4935]: I1201 20:08:54.508008 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:08:54 crc kubenswrapper[4935]: E1201 20:08:54.508959 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:09:05 crc kubenswrapper[4935]: I1201 20:09:05.508936 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:09:05 crc kubenswrapper[4935]: E1201 20:09:05.510124 4935 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:09:17 crc kubenswrapper[4935]: I1201 20:09:17.507962 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:09:17 crc kubenswrapper[4935]: E1201 20:09:17.509258 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:09:28 crc kubenswrapper[4935]: I1201 20:09:28.509476 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:09:28 crc kubenswrapper[4935]: I1201 20:09:28.839850 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"7accc577ff3ee35e634014a97efd720d6261296da14379225b5efcb18ad23775"} Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.598283 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-46ddf"] Dec 01 20:09:31 crc kubenswrapper[4935]: E1201 20:09:31.599611 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerName="extract-content" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.599630 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerName="extract-content" Dec 01 20:09:31 crc kubenswrapper[4935]: E1201 20:09:31.599697 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerName="registry-server" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.599706 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerName="registry-server" Dec 01 20:09:31 crc kubenswrapper[4935]: E1201 20:09:31.599724 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerName="extract-utilities" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.599734 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerName="extract-utilities" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.600040 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc15c87d-8ffe-40e0-a8f9-11ed099e5668" containerName="registry-server" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.606805 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.618024 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-46ddf"] Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.764733 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8mq6\" (UniqueName: \"kubernetes.io/projected/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-kube-api-access-t8mq6\") pod \"community-operators-46ddf\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.765175 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-catalog-content\") pod \"community-operators-46ddf\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.765231 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-utilities\") pod \"community-operators-46ddf\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.867610 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8mq6\" (UniqueName: \"kubernetes.io/projected/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-kube-api-access-t8mq6\") pod \"community-operators-46ddf\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.867694 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-catalog-content\") pod \"community-operators-46ddf\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.867756 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-utilities\") pod \"community-operators-46ddf\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.868221 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-catalog-content\") pod \"community-operators-46ddf\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.868315 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-utilities\") pod \"community-operators-46ddf\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.899912 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t8mq6\" (UniqueName: \"kubernetes.io/projected/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-kube-api-access-t8mq6\") pod \"community-operators-46ddf\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:31 crc kubenswrapper[4935]: I1201 20:09:31.942968 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:32 crc kubenswrapper[4935]: I1201 20:09:32.525823 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-46ddf"] Dec 01 20:09:33 crc kubenswrapper[4935]: W1201 20:09:33.013703 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ce2c7db_ab29_48f8_b1cf_008dcc22f89b.slice/crio-6144a250ba49463a63cf50400b864c25a719eaddda04049ffb849a69933d5282 WatchSource:0}: Error finding container 6144a250ba49463a63cf50400b864c25a719eaddda04049ffb849a69933d5282: Status 404 returned error can't find the container with id 6144a250ba49463a63cf50400b864c25a719eaddda04049ffb849a69933d5282 Dec 01 20:09:33 crc kubenswrapper[4935]: I1201 20:09:33.899179 4935 generic.go:334] "Generic (PLEG): container finished" podID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerID="33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e" exitCode=0 Dec 01 20:09:33 crc kubenswrapper[4935]: I1201 20:09:33.899499 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ddf" event={"ID":"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b","Type":"ContainerDied","Data":"33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e"} Dec 01 20:09:33 crc kubenswrapper[4935]: I1201 20:09:33.899670 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ddf" event={"ID":"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b","Type":"ContainerStarted","Data":"6144a250ba49463a63cf50400b864c25a719eaddda04049ffb849a69933d5282"} Dec 01 20:09:33 crc kubenswrapper[4935]: I1201 20:09:33.903476 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:09:35 crc kubenswrapper[4935]: I1201 20:09:35.932005 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ddf" event={"ID":"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b","Type":"ContainerStarted","Data":"0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864"} Dec 01 20:09:36 crc kubenswrapper[4935]: I1201 20:09:36.952340 4935 generic.go:334] "Generic (PLEG): container finished" podID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerID="0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864" exitCode=0 Dec 01 20:09:36 crc kubenswrapper[4935]: I1201 20:09:36.952411 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ddf" event={"ID":"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b","Type":"ContainerDied","Data":"0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864"} Dec 01 20:09:37 crc kubenswrapper[4935]: I1201 20:09:37.973942 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ddf" event={"ID":"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b","Type":"ContainerStarted","Data":"0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be"} Dec 01 20:09:38 crc kubenswrapper[4935]: I1201 
20:09:38.013096 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-46ddf" podStartSLOduration=3.367574245 podStartE2EDuration="7.013072528s" podCreationTimestamp="2025-12-01 20:09:31 +0000 UTC" firstStartedPulling="2025-12-01 20:09:33.903271981 +0000 UTC m=+5987.924901240" lastFinishedPulling="2025-12-01 20:09:37.548770254 +0000 UTC m=+5991.570399523" observedRunningTime="2025-12-01 20:09:37.997579184 +0000 UTC m=+5992.019208483" watchObservedRunningTime="2025-12-01 20:09:38.013072528 +0000 UTC m=+5992.034701797" Dec 01 20:09:41 crc kubenswrapper[4935]: I1201 20:09:41.943294 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:41 crc kubenswrapper[4935]: I1201 20:09:41.945046 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:42 crc kubenswrapper[4935]: I1201 20:09:42.025339 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:42 crc kubenswrapper[4935]: I1201 20:09:42.119005 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:42 crc kubenswrapper[4935]: I1201 20:09:42.293171 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-46ddf"] Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.054628 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-46ddf" podUID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerName="registry-server" containerID="cri-o://0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be" gracePeriod=2 Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.742752 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.846906 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-utilities\") pod \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.846964 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8mq6\" (UniqueName: \"kubernetes.io/projected/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-kube-api-access-t8mq6\") pod \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.847113 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-catalog-content\") pod \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\" (UID: \"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b\") " Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.847802 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-utilities" (OuterVolumeSpecName: "utilities") pod "5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" (UID: "5ce2c7db-ab29-48f8-b1cf-008dcc22f89b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.853112 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-kube-api-access-t8mq6" (OuterVolumeSpecName: "kube-api-access-t8mq6") pod "5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" (UID: "5ce2c7db-ab29-48f8-b1cf-008dcc22f89b"). InnerVolumeSpecName "kube-api-access-t8mq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.897213 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" (UID: "5ce2c7db-ab29-48f8-b1cf-008dcc22f89b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.950289 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.950331 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8mq6\" (UniqueName: \"kubernetes.io/projected/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-kube-api-access-t8mq6\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:44 crc kubenswrapper[4935]: I1201 20:09:44.950342 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.069104 4935 generic.go:334] "Generic (PLEG): container finished" podID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerID="0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be" exitCode=0 Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.069195 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ddf" event={"ID":"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b","Type":"ContainerDied","Data":"0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be"} Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.069278 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46ddf" event={"ID":"5ce2c7db-ab29-48f8-b1cf-008dcc22f89b","Type":"ContainerDied","Data":"6144a250ba49463a63cf50400b864c25a719eaddda04049ffb849a69933d5282"} Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.069304 4935 scope.go:117] "RemoveContainer" containerID="0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.069228 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-46ddf" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.096092 4935 scope.go:117] "RemoveContainer" containerID="0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.125176 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-46ddf"] Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.137925 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-46ddf"] Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.141828 4935 scope.go:117] "RemoveContainer" containerID="33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.213474 4935 scope.go:117] "RemoveContainer" containerID="0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be" Dec 01 20:09:45 crc kubenswrapper[4935]: E1201 20:09:45.214049 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be\": container with ID starting with 0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be not found: ID does not exist" containerID="0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.214083 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be"} err="failed to get container status \"0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be\": rpc error: code = NotFound desc = could not find container \"0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be\": container with ID starting with 0a7d8bd00fe4cc3561141e46a49995a8c4ce0d1715bdbd65cccb593f6d63a6be not found: ID does not exist" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.214112 4935 scope.go:117] "RemoveContainer" containerID="0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864" Dec 01 20:09:45 crc kubenswrapper[4935]: E1201 20:09:45.214525 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864\": container with ID starting with 0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864 not found: ID does not exist" containerID="0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.214550 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864"} err="failed to get container status \"0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864\": rpc error: code = NotFound desc = could not find container \"0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864\": container with ID starting with 0e67d24339813c0ec9e8f300788bc74d4868c5dccb6e34ef9940f8d9657a7864 not found: ID does not exist" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.214564 4935 scope.go:117] "RemoveContainer" containerID="33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e" Dec 01 20:09:45 crc kubenswrapper[4935]: E1201 20:09:45.214960 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e\": container with ID starting with 33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e not found: ID does not exist" containerID="33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e" Dec 01 20:09:45 crc kubenswrapper[4935]: I1201 20:09:45.215034 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e"} err="failed to get container status \"33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e\": rpc error: code = NotFound desc = could not find container \"33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e\": container with ID starting with 33ebde75a3202d0b470f631c1a49b4052aa5f0b7e80a9cb94a1c8e765c1e0a1e not found: ID does not exist" Dec 01 20:09:46 crc kubenswrapper[4935]: I1201 20:09:46.524131 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" path="/var/lib/kubelet/pods/5ce2c7db-ab29-48f8-b1cf-008dcc22f89b/volumes" Dec 01 20:11:13 crc kubenswrapper[4935]: I1201 20:11:13.068834 4935 generic.go:334] "Generic (PLEG): container finished" podID="ed5fb4cf-a415-4429-af67-924e3f70cb3d" containerID="1179400c9ecdbfed9dc0eb3c0bf9a4658797dbb22eaf862ac636c2e1e63cf728" exitCode=0 Dec 01 20:11:13 crc kubenswrapper[4935]: I1201 20:11:13.068895 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"ed5fb4cf-a415-4429-af67-924e3f70cb3d","Type":"ContainerDied","Data":"1179400c9ecdbfed9dc0eb3c0bf9a4658797dbb22eaf862ac636c2e1e63cf728"} Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.433060 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.527542 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ca-certs\") pod \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.528010 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config\") pod \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.528054 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ssh-key\") pod \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.528101 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-config-data\") pod \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.528138 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-workdir\") pod \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.528190 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-temporary\") pod \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.528348 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cm5m\" (UniqueName: \"kubernetes.io/projected/ed5fb4cf-a415-4429-af67-924e3f70cb3d-kube-api-access-6cm5m\") pod \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.528455 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.528528 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config-secret\") pod \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\" (UID: \"ed5fb4cf-a415-4429-af67-924e3f70cb3d\") " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.528988 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "ed5fb4cf-a415-4429-af67-924e3f70cb3d" (UID: "ed5fb4cf-a415-4429-af67-924e3f70cb3d"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.529759 4935 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.530097 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-config-data" (OuterVolumeSpecName: "config-data") pod "ed5fb4cf-a415-4429-af67-924e3f70cb3d" (UID: "ed5fb4cf-a415-4429-af67-924e3f70cb3d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.535004 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "ed5fb4cf-a415-4429-af67-924e3f70cb3d" (UID: "ed5fb4cf-a415-4429-af67-924e3f70cb3d"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.535834 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed5fb4cf-a415-4429-af67-924e3f70cb3d-kube-api-access-6cm5m" (OuterVolumeSpecName: "kube-api-access-6cm5m") pod "ed5fb4cf-a415-4429-af67-924e3f70cb3d" (UID: "ed5fb4cf-a415-4429-af67-924e3f70cb3d"). InnerVolumeSpecName "kube-api-access-6cm5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.541860 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "test-operator-logs") pod "ed5fb4cf-a415-4429-af67-924e3f70cb3d" (UID: "ed5fb4cf-a415-4429-af67-924e3f70cb3d"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.569715 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "ed5fb4cf-a415-4429-af67-924e3f70cb3d" (UID: "ed5fb4cf-a415-4429-af67-924e3f70cb3d"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.574945 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "ed5fb4cf-a415-4429-af67-924e3f70cb3d" (UID: "ed5fb4cf-a415-4429-af67-924e3f70cb3d"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.586461 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ed5fb4cf-a415-4429-af67-924e3f70cb3d" (UID: "ed5fb4cf-a415-4429-af67-924e3f70cb3d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.598918 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "ed5fb4cf-a415-4429-af67-924e3f70cb3d" (UID: "ed5fb4cf-a415-4429-af67-924e3f70cb3d"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.632588 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cm5m\" (UniqueName: \"kubernetes.io/projected/ed5fb4cf-a415-4429-af67-924e3f70cb3d-kube-api-access-6cm5m\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.634704 4935 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.634728 4935 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.634738 4935 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.634748 4935 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.634758 4935 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ed5fb4cf-a415-4429-af67-924e3f70cb3d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.634770 4935 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ed5fb4cf-a415-4429-af67-924e3f70cb3d-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.634780 4935 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/ed5fb4cf-a415-4429-af67-924e3f70cb3d-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.666615 4935 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 01 20:11:14 crc kubenswrapper[4935]: I1201 20:11:14.737120 4935 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 01 
20:11:15 crc kubenswrapper[4935]: I1201 20:11:15.095788 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"ed5fb4cf-a415-4429-af67-924e3f70cb3d","Type":"ContainerDied","Data":"83ba65ceb49581f83d43009e8dbe04203cccb103cd6678fb7421499093f4fe96"} Dec 01 20:11:15 crc kubenswrapper[4935]: I1201 20:11:15.095839 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83ba65ceb49581f83d43009e8dbe04203cccb103cd6678fb7421499093f4fe96" Dec 01 20:11:15 crc kubenswrapper[4935]: I1201 20:11:15.095904 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.346542 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 20:11:17 crc kubenswrapper[4935]: E1201 20:11:17.348828 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed5fb4cf-a415-4429-af67-924e3f70cb3d" containerName="tempest-tests-tempest-tests-runner" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.348871 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed5fb4cf-a415-4429-af67-924e3f70cb3d" containerName="tempest-tests-tempest-tests-runner" Dec 01 20:11:17 crc kubenswrapper[4935]: E1201 20:11:17.348902 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerName="extract-content" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.348911 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerName="extract-content" Dec 01 20:11:17 crc kubenswrapper[4935]: E1201 20:11:17.348951 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerName="registry-server" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.348961 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerName="registry-server" Dec 01 20:11:17 crc kubenswrapper[4935]: E1201 20:11:17.348999 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerName="extract-utilities" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.349007 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerName="extract-utilities" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.349315 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed5fb4cf-a415-4429-af67-924e3f70cb3d" containerName="tempest-tests-tempest-tests-runner" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.349351 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ce2c7db-ab29-48f8-b1cf-008dcc22f89b" containerName="registry-server" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.350449 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.356732 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-hh695" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.364434 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.405525 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"69e5db88-4c8e-4e1a-926f-285b3f5312d1\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.405628 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frmvj\" (UniqueName: \"kubernetes.io/projected/69e5db88-4c8e-4e1a-926f-285b3f5312d1-kube-api-access-frmvj\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"69e5db88-4c8e-4e1a-926f-285b3f5312d1\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.517115 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"69e5db88-4c8e-4e1a-926f-285b3f5312d1\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.517316 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frmvj\" (UniqueName: \"kubernetes.io/projected/69e5db88-4c8e-4e1a-926f-285b3f5312d1-kube-api-access-frmvj\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"69e5db88-4c8e-4e1a-926f-285b3f5312d1\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.522071 4935 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"69e5db88-4c8e-4e1a-926f-285b3f5312d1\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.544928 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frmvj\" (UniqueName: \"kubernetes.io/projected/69e5db88-4c8e-4e1a-926f-285b3f5312d1-kube-api-access-frmvj\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"69e5db88-4c8e-4e1a-926f-285b3f5312d1\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:11:17 crc kubenswrapper[4935]: I1201 20:11:17.577812 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"69e5db88-4c8e-4e1a-926f-285b3f5312d1\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:11:17 crc 
kubenswrapper[4935]: I1201 20:11:17.687655 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:11:18 crc kubenswrapper[4935]: W1201 20:11:18.228375 4935 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69e5db88_4c8e_4e1a_926f_285b3f5312d1.slice/crio-9f55c7919453a814586686280f28ad8c29a678475382fbe25794a1d9a4eef129 WatchSource:0}: Error finding container 9f55c7919453a814586686280f28ad8c29a678475382fbe25794a1d9a4eef129: Status 404 returned error can't find the container with id 9f55c7919453a814586686280f28ad8c29a678475382fbe25794a1d9a4eef129 Dec 01 20:11:18 crc kubenswrapper[4935]: I1201 20:11:18.231130 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 20:11:19 crc kubenswrapper[4935]: I1201 20:11:19.144833 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"69e5db88-4c8e-4e1a-926f-285b3f5312d1","Type":"ContainerStarted","Data":"9f55c7919453a814586686280f28ad8c29a678475382fbe25794a1d9a4eef129"} Dec 01 20:11:20 crc kubenswrapper[4935]: I1201 20:11:20.156718 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"69e5db88-4c8e-4e1a-926f-285b3f5312d1","Type":"ContainerStarted","Data":"b598bcdad86810644b5b72ad80962a4ac4d3c65d7389c70b0cd25d2b46a52ed1"} Dec 01 20:11:20 crc kubenswrapper[4935]: I1201 20:11:20.172314 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.722823557 podStartE2EDuration="3.172291881s" podCreationTimestamp="2025-12-01 20:11:17 +0000 UTC" firstStartedPulling="2025-12-01 20:11:18.232601563 +0000 UTC m=+6092.254230832" lastFinishedPulling="2025-12-01 20:11:19.682069897 +0000 UTC m=+6093.703699156" observedRunningTime="2025-12-01 20:11:20.169019299 +0000 UTC m=+6094.190648558" watchObservedRunningTime="2025-12-01 20:11:20.172291881 +0000 UTC m=+6094.193921140" Dec 01 20:11:54 crc kubenswrapper[4935]: I1201 20:11:54.346474 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:11:54 crc kubenswrapper[4935]: I1201 20:11:54.347257 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.149470 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rhfwr/must-gather-sxcst"] Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.152555 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.155293 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-rhfwr"/"default-dockercfg-nwpnw" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.155851 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-rhfwr"/"openshift-service-ca.crt" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.155852 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-rhfwr"/"kube-root-ca.crt" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.227829 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d0b1f897-ab76-4bf0-888c-748a3dc52c96-must-gather-output\") pod \"must-gather-sxcst\" (UID: \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\") " pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.228009 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8vsw\" (UniqueName: \"kubernetes.io/projected/d0b1f897-ab76-4bf0-888c-748a3dc52c96-kube-api-access-r8vsw\") pod \"must-gather-sxcst\" (UID: \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\") " pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.329701 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8vsw\" (UniqueName: \"kubernetes.io/projected/d0b1f897-ab76-4bf0-888c-748a3dc52c96-kube-api-access-r8vsw\") pod \"must-gather-sxcst\" (UID: \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\") " pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.329890 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d0b1f897-ab76-4bf0-888c-748a3dc52c96-must-gather-output\") pod \"must-gather-sxcst\" (UID: \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\") " pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.330413 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d0b1f897-ab76-4bf0-888c-748a3dc52c96-must-gather-output\") pod \"must-gather-sxcst\" (UID: \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\") " pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.347772 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8vsw\" (UniqueName: \"kubernetes.io/projected/d0b1f897-ab76-4bf0-888c-748a3dc52c96-kube-api-access-r8vsw\") pod \"must-gather-sxcst\" (UID: \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\") " pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.408928 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-rhfwr/must-gather-sxcst"] Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.472681 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:12:05 crc kubenswrapper[4935]: I1201 20:12:05.964048 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-rhfwr/must-gather-sxcst"] Dec 01 20:12:06 crc kubenswrapper[4935]: I1201 20:12:06.720956 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/must-gather-sxcst" event={"ID":"d0b1f897-ab76-4bf0-888c-748a3dc52c96","Type":"ContainerStarted","Data":"c2f64e3e6968b91e4e568a2128e607ffeb6c56ce79118e8b33f8b1c1cf97b441"} Dec 01 20:12:13 crc kubenswrapper[4935]: I1201 20:12:13.827057 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/must-gather-sxcst" event={"ID":"d0b1f897-ab76-4bf0-888c-748a3dc52c96","Type":"ContainerStarted","Data":"225c2b85640ad2766db30fa18036c1aa361ca974decd24b656cc64dfab2d3d13"} Dec 01 20:12:13 crc kubenswrapper[4935]: I1201 20:12:13.828527 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/must-gather-sxcst" event={"ID":"d0b1f897-ab76-4bf0-888c-748a3dc52c96","Type":"ContainerStarted","Data":"bfdced1dd3c2098cde224aafb4b4579cf4bdfe5c6340b9b2019be146876dcfeb"} Dec 01 20:12:13 crc kubenswrapper[4935]: I1201 20:12:13.854546 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rhfwr/must-gather-sxcst" podStartSLOduration=2.080083097 podStartE2EDuration="8.854526297s" podCreationTimestamp="2025-12-01 20:12:05 +0000 UTC" firstStartedPulling="2025-12-01 20:12:05.968267034 +0000 UTC m=+6139.989896293" lastFinishedPulling="2025-12-01 20:12:12.742710234 +0000 UTC m=+6146.764339493" observedRunningTime="2025-12-01 20:12:13.847454676 +0000 UTC m=+6147.869083935" watchObservedRunningTime="2025-12-01 20:12:13.854526297 +0000 UTC m=+6147.876155556" Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.354508 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rhfwr/crc-debug-hc9dz"] Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.357210 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.451933 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a915a9f-f913-4e0f-812b-5eba21128cfd-host\") pod \"crc-debug-hc9dz\" (UID: \"0a915a9f-f913-4e0f-812b-5eba21128cfd\") " pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.452098 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brs4w\" (UniqueName: \"kubernetes.io/projected/0a915a9f-f913-4e0f-812b-5eba21128cfd-kube-api-access-brs4w\") pod \"crc-debug-hc9dz\" (UID: \"0a915a9f-f913-4e0f-812b-5eba21128cfd\") " pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.554854 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a915a9f-f913-4e0f-812b-5eba21128cfd-host\") pod \"crc-debug-hc9dz\" (UID: \"0a915a9f-f913-4e0f-812b-5eba21128cfd\") " pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.554984 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brs4w\" (UniqueName: \"kubernetes.io/projected/0a915a9f-f913-4e0f-812b-5eba21128cfd-kube-api-access-brs4w\") pod \"crc-debug-hc9dz\" (UID: \"0a915a9f-f913-4e0f-812b-5eba21128cfd\") " pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.555536 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a915a9f-f913-4e0f-812b-5eba21128cfd-host\") pod \"crc-debug-hc9dz\" (UID: \"0a915a9f-f913-4e0f-812b-5eba21128cfd\") " pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.579639 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brs4w\" (UniqueName: \"kubernetes.io/projected/0a915a9f-f913-4e0f-812b-5eba21128cfd-kube-api-access-brs4w\") pod \"crc-debug-hc9dz\" (UID: \"0a915a9f-f913-4e0f-812b-5eba21128cfd\") " pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.677266 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:12:17 crc kubenswrapper[4935]: I1201 20:12:17.870861 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" event={"ID":"0a915a9f-f913-4e0f-812b-5eba21128cfd","Type":"ContainerStarted","Data":"1d4d0257cc909cf019e626eff3a3031e8016a85aadcd280d7ed65920333d641e"} Dec 01 20:12:24 crc kubenswrapper[4935]: I1201 20:12:24.346199 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:12:24 crc kubenswrapper[4935]: I1201 20:12:24.346722 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:12:31 crc kubenswrapper[4935]: I1201 20:12:31.041896 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" event={"ID":"0a915a9f-f913-4e0f-812b-5eba21128cfd","Type":"ContainerStarted","Data":"a26f78480f07f1b8675c77440e925f22bb8e255eec9b708475125e0456a3e7f0"} Dec 01 20:12:31 crc kubenswrapper[4935]: I1201 20:12:31.061164 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" podStartSLOduration=1.756842024 podStartE2EDuration="14.061132043s" podCreationTimestamp="2025-12-01 20:12:17 +0000 UTC" firstStartedPulling="2025-12-01 20:12:17.729915398 +0000 UTC m=+6151.751544657" lastFinishedPulling="2025-12-01 20:12:30.034205417 +0000 UTC m=+6164.055834676" observedRunningTime="2025-12-01 20:12:31.056823828 +0000 UTC m=+6165.078453087" watchObservedRunningTime="2025-12-01 20:12:31.061132043 +0000 UTC m=+6165.082761302" Dec 01 20:12:54 crc kubenswrapper[4935]: I1201 20:12:54.346019 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:12:54 crc kubenswrapper[4935]: I1201 20:12:54.346775 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:12:54 crc kubenswrapper[4935]: I1201 20:12:54.346829 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 20:12:54 crc kubenswrapper[4935]: I1201 20:12:54.347998 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7accc577ff3ee35e634014a97efd720d6261296da14379225b5efcb18ad23775"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:12:54 crc kubenswrapper[4935]: I1201 20:12:54.348057 4935 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://7accc577ff3ee35e634014a97efd720d6261296da14379225b5efcb18ad23775" gracePeriod=600 Dec 01 20:12:55 crc kubenswrapper[4935]: I1201 20:12:55.337680 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="7accc577ff3ee35e634014a97efd720d6261296da14379225b5efcb18ad23775" exitCode=0 Dec 01 20:12:55 crc kubenswrapper[4935]: I1201 20:12:55.337751 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"7accc577ff3ee35e634014a97efd720d6261296da14379225b5efcb18ad23775"} Dec 01 20:12:55 crc kubenswrapper[4935]: I1201 20:12:55.338241 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b"} Dec 01 20:12:55 crc kubenswrapper[4935]: I1201 20:12:55.338267 4935 scope.go:117] "RemoveContainer" containerID="fc5a22405a53c18183e42c624dc1381fe362e25f9a66340b0e90036bf7fdddda" Dec 01 20:13:24 crc kubenswrapper[4935]: E1201 20:13:24.524174 4935 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a915a9f_f913_4e0f_812b_5eba21128cfd.slice/crio-a26f78480f07f1b8675c77440e925f22bb8e255eec9b708475125e0456a3e7f0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 20:13:24 crc kubenswrapper[4935]: I1201 20:13:24.697087 4935 generic.go:334] "Generic (PLEG): container finished" podID="0a915a9f-f913-4e0f-812b-5eba21128cfd" containerID="a26f78480f07f1b8675c77440e925f22bb8e255eec9b708475125e0456a3e7f0" exitCode=0 Dec 01 20:13:24 crc kubenswrapper[4935]: I1201 20:13:24.697558 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" event={"ID":"0a915a9f-f913-4e0f-812b-5eba21128cfd","Type":"ContainerDied","Data":"a26f78480f07f1b8675c77440e925f22bb8e255eec9b708475125e0456a3e7f0"} Dec 01 20:13:25 crc kubenswrapper[4935]: I1201 20:13:25.857255 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:13:25 crc kubenswrapper[4935]: I1201 20:13:25.901601 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rhfwr/crc-debug-hc9dz"] Dec 01 20:13:25 crc kubenswrapper[4935]: I1201 20:13:25.912362 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rhfwr/crc-debug-hc9dz"] Dec 01 20:13:25 crc kubenswrapper[4935]: I1201 20:13:25.995567 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brs4w\" (UniqueName: \"kubernetes.io/projected/0a915a9f-f913-4e0f-812b-5eba21128cfd-kube-api-access-brs4w\") pod \"0a915a9f-f913-4e0f-812b-5eba21128cfd\" (UID: \"0a915a9f-f913-4e0f-812b-5eba21128cfd\") " Dec 01 20:13:25 crc kubenswrapper[4935]: I1201 20:13:25.995759 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a915a9f-f913-4e0f-812b-5eba21128cfd-host\") pod \"0a915a9f-f913-4e0f-812b-5eba21128cfd\" (UID: \"0a915a9f-f913-4e0f-812b-5eba21128cfd\") " Dec 01 20:13:25 crc kubenswrapper[4935]: I1201 20:13:25.995806 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0a915a9f-f913-4e0f-812b-5eba21128cfd-host" (OuterVolumeSpecName: "host") pod "0a915a9f-f913-4e0f-812b-5eba21128cfd" (UID: "0a915a9f-f913-4e0f-812b-5eba21128cfd"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:13:25 crc kubenswrapper[4935]: I1201 20:13:25.996662 4935 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a915a9f-f913-4e0f-812b-5eba21128cfd-host\") on node \"crc\" DevicePath \"\"" Dec 01 20:13:26 crc kubenswrapper[4935]: I1201 20:13:26.001912 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a915a9f-f913-4e0f-812b-5eba21128cfd-kube-api-access-brs4w" (OuterVolumeSpecName: "kube-api-access-brs4w") pod "0a915a9f-f913-4e0f-812b-5eba21128cfd" (UID: "0a915a9f-f913-4e0f-812b-5eba21128cfd"). InnerVolumeSpecName "kube-api-access-brs4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:13:26 crc kubenswrapper[4935]: I1201 20:13:26.099291 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brs4w\" (UniqueName: \"kubernetes.io/projected/0a915a9f-f913-4e0f-812b-5eba21128cfd-kube-api-access-brs4w\") on node \"crc\" DevicePath \"\"" Dec 01 20:13:26 crc kubenswrapper[4935]: I1201 20:13:26.525044 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a915a9f-f913-4e0f-812b-5eba21128cfd" path="/var/lib/kubelet/pods/0a915a9f-f913-4e0f-812b-5eba21128cfd/volumes" Dec 01 20:13:26 crc kubenswrapper[4935]: I1201 20:13:26.734588 4935 scope.go:117] "RemoveContainer" containerID="a26f78480f07f1b8675c77440e925f22bb8e255eec9b708475125e0456a3e7f0" Dec 01 20:13:26 crc kubenswrapper[4935]: I1201 20:13:26.734617 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-hc9dz" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.067710 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rhfwr/crc-debug-r59cm"] Dec 01 20:13:27 crc kubenswrapper[4935]: E1201 20:13:27.068209 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a915a9f-f913-4e0f-812b-5eba21128cfd" containerName="container-00" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.068228 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a915a9f-f913-4e0f-812b-5eba21128cfd" containerName="container-00" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.068473 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a915a9f-f913-4e0f-812b-5eba21128cfd" containerName="container-00" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.069324 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.225413 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25sht\" (UniqueName: \"kubernetes.io/projected/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-kube-api-access-25sht\") pod \"crc-debug-r59cm\" (UID: \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\") " pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.225648 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-host\") pod \"crc-debug-r59cm\" (UID: \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\") " pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.329204 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-host\") pod \"crc-debug-r59cm\" (UID: \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\") " pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.329312 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-host\") pod \"crc-debug-r59cm\" (UID: \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\") " pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.329539 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25sht\" (UniqueName: \"kubernetes.io/projected/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-kube-api-access-25sht\") pod \"crc-debug-r59cm\" (UID: \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\") " pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.348941 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25sht\" (UniqueName: \"kubernetes.io/projected/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-kube-api-access-25sht\") pod \"crc-debug-r59cm\" (UID: \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\") " pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.388049 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:27 crc kubenswrapper[4935]: I1201 20:13:27.753476 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/crc-debug-r59cm" event={"ID":"3216ccd3-6ece-43d0-912e-8ac72c9f87c0","Type":"ContainerStarted","Data":"ef84a995909137738dc07838aaa28211b9e00a98a9070df65a2c11608924a26a"} Dec 01 20:13:28 crc kubenswrapper[4935]: I1201 20:13:28.779506 4935 generic.go:334] "Generic (PLEG): container finished" podID="3216ccd3-6ece-43d0-912e-8ac72c9f87c0" containerID="cd41c4d1b385fc74b3b288297e4a06a7377e7a1237e7553e66de1094d771342c" exitCode=0 Dec 01 20:13:28 crc kubenswrapper[4935]: I1201 20:13:28.780113 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/crc-debug-r59cm" event={"ID":"3216ccd3-6ece-43d0-912e-8ac72c9f87c0","Type":"ContainerDied","Data":"cd41c4d1b385fc74b3b288297e4a06a7377e7a1237e7553e66de1094d771342c"} Dec 01 20:13:29 crc kubenswrapper[4935]: I1201 20:13:29.908413 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:29 crc kubenswrapper[4935]: I1201 20:13:29.997382 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-host\") pod \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\" (UID: \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\") " Dec 01 20:13:29 crc kubenswrapper[4935]: I1201 20:13:29.997425 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25sht\" (UniqueName: \"kubernetes.io/projected/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-kube-api-access-25sht\") pod \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\" (UID: \"3216ccd3-6ece-43d0-912e-8ac72c9f87c0\") " Dec 01 20:13:30 crc kubenswrapper[4935]: I1201 20:13:30.001588 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-host" (OuterVolumeSpecName: "host") pod "3216ccd3-6ece-43d0-912e-8ac72c9f87c0" (UID: "3216ccd3-6ece-43d0-912e-8ac72c9f87c0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:13:30 crc kubenswrapper[4935]: I1201 20:13:30.037449 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-kube-api-access-25sht" (OuterVolumeSpecName: "kube-api-access-25sht") pod "3216ccd3-6ece-43d0-912e-8ac72c9f87c0" (UID: "3216ccd3-6ece-43d0-912e-8ac72c9f87c0"). InnerVolumeSpecName "kube-api-access-25sht". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:13:30 crc kubenswrapper[4935]: I1201 20:13:30.100229 4935 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-host\") on node \"crc\" DevicePath \"\"" Dec 01 20:13:30 crc kubenswrapper[4935]: I1201 20:13:30.100265 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25sht\" (UniqueName: \"kubernetes.io/projected/3216ccd3-6ece-43d0-912e-8ac72c9f87c0-kube-api-access-25sht\") on node \"crc\" DevicePath \"\"" Dec 01 20:13:30 crc kubenswrapper[4935]: I1201 20:13:30.800736 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/crc-debug-r59cm" event={"ID":"3216ccd3-6ece-43d0-912e-8ac72c9f87c0","Type":"ContainerDied","Data":"ef84a995909137738dc07838aaa28211b9e00a98a9070df65a2c11608924a26a"} Dec 01 20:13:30 crc kubenswrapper[4935]: I1201 20:13:30.801119 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef84a995909137738dc07838aaa28211b9e00a98a9070df65a2c11608924a26a" Dec 01 20:13:30 crc kubenswrapper[4935]: I1201 20:13:30.800781 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-r59cm" Dec 01 20:13:31 crc kubenswrapper[4935]: I1201 20:13:31.232343 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rhfwr/crc-debug-r59cm"] Dec 01 20:13:31 crc kubenswrapper[4935]: I1201 20:13:31.243384 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rhfwr/crc-debug-r59cm"] Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.387154 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-rhfwr/crc-debug-x5mkn"] Dec 01 20:13:32 crc kubenswrapper[4935]: E1201 20:13:32.387888 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3216ccd3-6ece-43d0-912e-8ac72c9f87c0" containerName="container-00" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.387900 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="3216ccd3-6ece-43d0-912e-8ac72c9f87c0" containerName="container-00" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.388182 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="3216ccd3-6ece-43d0-912e-8ac72c9f87c0" containerName="container-00" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.389357 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.531686 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3216ccd3-6ece-43d0-912e-8ac72c9f87c0" path="/var/lib/kubelet/pods/3216ccd3-6ece-43d0-912e-8ac72c9f87c0/volumes" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.552892 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab4cd373-c3a4-4954-966c-b66c11361ef1-host\") pod \"crc-debug-x5mkn\" (UID: \"ab4cd373-c3a4-4954-966c-b66c11361ef1\") " pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.553028 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtx25\" (UniqueName: \"kubernetes.io/projected/ab4cd373-c3a4-4954-966c-b66c11361ef1-kube-api-access-jtx25\") pod \"crc-debug-x5mkn\" (UID: \"ab4cd373-c3a4-4954-966c-b66c11361ef1\") " pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.655489 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtx25\" (UniqueName: \"kubernetes.io/projected/ab4cd373-c3a4-4954-966c-b66c11361ef1-kube-api-access-jtx25\") pod \"crc-debug-x5mkn\" (UID: \"ab4cd373-c3a4-4954-966c-b66c11361ef1\") " pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.655861 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab4cd373-c3a4-4954-966c-b66c11361ef1-host\") pod \"crc-debug-x5mkn\" (UID: \"ab4cd373-c3a4-4954-966c-b66c11361ef1\") " pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.655967 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab4cd373-c3a4-4954-966c-b66c11361ef1-host\") pod \"crc-debug-x5mkn\" (UID: \"ab4cd373-c3a4-4954-966c-b66c11361ef1\") " pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.675708 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtx25\" (UniqueName: \"kubernetes.io/projected/ab4cd373-c3a4-4954-966c-b66c11361ef1-kube-api-access-jtx25\") pod \"crc-debug-x5mkn\" (UID: \"ab4cd373-c3a4-4954-966c-b66c11361ef1\") " pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.706764 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:32 crc kubenswrapper[4935]: I1201 20:13:32.827023 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" event={"ID":"ab4cd373-c3a4-4954-966c-b66c11361ef1","Type":"ContainerStarted","Data":"adf1a1b527ec2a07fffdf11c0b10cc330ab86162ab030635bdf919ce137d19e2"} Dec 01 20:13:33 crc kubenswrapper[4935]: I1201 20:13:33.842006 4935 generic.go:334] "Generic (PLEG): container finished" podID="ab4cd373-c3a4-4954-966c-b66c11361ef1" containerID="ff4aa01fa828ccad1805f5318bb4bab95a6a5dade7668b43438a077a52a7486d" exitCode=0 Dec 01 20:13:33 crc kubenswrapper[4935]: I1201 20:13:33.842068 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" event={"ID":"ab4cd373-c3a4-4954-966c-b66c11361ef1","Type":"ContainerDied","Data":"ff4aa01fa828ccad1805f5318bb4bab95a6a5dade7668b43438a077a52a7486d"} Dec 01 20:13:33 crc kubenswrapper[4935]: I1201 20:13:33.896982 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rhfwr/crc-debug-x5mkn"] Dec 01 20:13:33 crc kubenswrapper[4935]: I1201 20:13:33.909743 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rhfwr/crc-debug-x5mkn"] Dec 01 20:13:34 crc kubenswrapper[4935]: I1201 20:13:34.975652 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:35 crc kubenswrapper[4935]: I1201 20:13:35.121371 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab4cd373-c3a4-4954-966c-b66c11361ef1-host\") pod \"ab4cd373-c3a4-4954-966c-b66c11361ef1\" (UID: \"ab4cd373-c3a4-4954-966c-b66c11361ef1\") " Dec 01 20:13:35 crc kubenswrapper[4935]: I1201 20:13:35.121554 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab4cd373-c3a4-4954-966c-b66c11361ef1-host" (OuterVolumeSpecName: "host") pod "ab4cd373-c3a4-4954-966c-b66c11361ef1" (UID: "ab4cd373-c3a4-4954-966c-b66c11361ef1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:13:35 crc kubenswrapper[4935]: I1201 20:13:35.122099 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtx25\" (UniqueName: \"kubernetes.io/projected/ab4cd373-c3a4-4954-966c-b66c11361ef1-kube-api-access-jtx25\") pod \"ab4cd373-c3a4-4954-966c-b66c11361ef1\" (UID: \"ab4cd373-c3a4-4954-966c-b66c11361ef1\") " Dec 01 20:13:35 crc kubenswrapper[4935]: I1201 20:13:35.123235 4935 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ab4cd373-c3a4-4954-966c-b66c11361ef1-host\") on node \"crc\" DevicePath \"\"" Dec 01 20:13:35 crc kubenswrapper[4935]: I1201 20:13:35.130089 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab4cd373-c3a4-4954-966c-b66c11361ef1-kube-api-access-jtx25" (OuterVolumeSpecName: "kube-api-access-jtx25") pod "ab4cd373-c3a4-4954-966c-b66c11361ef1" (UID: "ab4cd373-c3a4-4954-966c-b66c11361ef1"). InnerVolumeSpecName "kube-api-access-jtx25". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:13:35 crc kubenswrapper[4935]: I1201 20:13:35.225303 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtx25\" (UniqueName: \"kubernetes.io/projected/ab4cd373-c3a4-4954-966c-b66c11361ef1-kube-api-access-jtx25\") on node \"crc\" DevicePath \"\"" Dec 01 20:13:35 crc kubenswrapper[4935]: I1201 20:13:35.872476 4935 scope.go:117] "RemoveContainer" containerID="ff4aa01fa828ccad1805f5318bb4bab95a6a5dade7668b43438a077a52a7486d" Dec 01 20:13:35 crc kubenswrapper[4935]: I1201 20:13:35.872499 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-rhfwr/crc-debug-x5mkn" Dec 01 20:13:36 crc kubenswrapper[4935]: I1201 20:13:36.528070 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab4cd373-c3a4-4954-966c-b66c11361ef1" path="/var/lib/kubelet/pods/ab4cd373-c3a4-4954-966c-b66c11361ef1/volumes" Dec 01 20:14:01 crc kubenswrapper[4935]: I1201 20:14:01.268913 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_33ca5248-0c8e-4b6d-81ad-15c4c328dbd8/aodh-api/0.log" Dec 01 20:14:01 crc kubenswrapper[4935]: I1201 20:14:01.406381 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_33ca5248-0c8e-4b6d-81ad-15c4c328dbd8/aodh-evaluator/0.log" Dec 01 20:14:01 crc kubenswrapper[4935]: I1201 20:14:01.477956 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_33ca5248-0c8e-4b6d-81ad-15c4c328dbd8/aodh-listener/0.log" Dec 01 20:14:01 crc kubenswrapper[4935]: I1201 20:14:01.526704 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_33ca5248-0c8e-4b6d-81ad-15c4c328dbd8/aodh-notifier/0.log" Dec 01 20:14:01 crc kubenswrapper[4935]: I1201 20:14:01.645809 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5f6b854f9d-n7chb_0f1b009e-5300-416a-a397-79765cbcad0d/barbican-api/0.log" Dec 01 20:14:01 crc kubenswrapper[4935]: I1201 20:14:01.789826 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5f6b854f9d-n7chb_0f1b009e-5300-416a-a397-79765cbcad0d/barbican-api-log/0.log" Dec 01 20:14:01 crc kubenswrapper[4935]: I1201 20:14:01.939427 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-684877c94b-g4nmw_4d980d57-2d10-4065-990f-e381180f2175/barbican-keystone-listener/0.log" Dec 01 20:14:02 crc kubenswrapper[4935]: I1201 20:14:02.041861 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-684877c94b-g4nmw_4d980d57-2d10-4065-990f-e381180f2175/barbican-keystone-listener-log/0.log" Dec 01 20:14:02 crc kubenswrapper[4935]: I1201 20:14:02.144790 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b8c989c57-rgsh8_05c203ba-25f0-4331-bf4f-19593176e6a1/barbican-worker/0.log" Dec 01 20:14:02 crc kubenswrapper[4935]: I1201 20:14:02.226820 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b8c989c57-rgsh8_05c203ba-25f0-4331-bf4f-19593176e6a1/barbican-worker-log/0.log" Dec 01 20:14:02 crc kubenswrapper[4935]: I1201 20:14:02.407382 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-cf9jf_a552da3f-247b-4339-a48d-79a3c948af00/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:02 crc kubenswrapper[4935]: I1201 20:14:02.661724 4935 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0f43eed1-cea2-4621-8fec-f55587776177/ceilometer-central-agent/0.log" Dec 01 20:14:02 crc kubenswrapper[4935]: I1201 20:14:02.905343 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0f43eed1-cea2-4621-8fec-f55587776177/ceilometer-notification-agent/0.log" Dec 01 20:14:03 crc kubenswrapper[4935]: I1201 20:14:03.030090 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0f43eed1-cea2-4621-8fec-f55587776177/proxy-httpd/0.log" Dec 01 20:14:03 crc kubenswrapper[4935]: I1201 20:14:03.076514 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_0f43eed1-cea2-4621-8fec-f55587776177/sg-core/0.log" Dec 01 20:14:03 crc kubenswrapper[4935]: I1201 20:14:03.275677 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_4ea7d483-2743-49a5-b6f0-a0e3355c2f2b/cinder-api-log/0.log" Dec 01 20:14:03 crc kubenswrapper[4935]: I1201 20:14:03.308128 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_4ea7d483-2743-49a5-b6f0-a0e3355c2f2b/cinder-api/0.log" Dec 01 20:14:03 crc kubenswrapper[4935]: I1201 20:14:03.560899 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_5f18d08d-0b37-4ae1-afac-5377ccb99cc2/cinder-scheduler/0.log" Dec 01 20:14:03 crc kubenswrapper[4935]: I1201 20:14:03.790086 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_5f18d08d-0b37-4ae1-afac-5377ccb99cc2/probe/0.log" Dec 01 20:14:03 crc kubenswrapper[4935]: I1201 20:14:03.871336 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-ppwrn_0048b1eb-735d-437d-b7bd-ad2814905c56/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:04 crc kubenswrapper[4935]: I1201 20:14:04.105794 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-rzgwc_8de36041-d199-4faa-91f8-c5b974a39b83/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:04 crc kubenswrapper[4935]: I1201 20:14:04.217214 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5d75f767dc-nxgr9_d4211a91-6935-4af5-8eb9-a3941c9b5293/init/0.log" Dec 01 20:14:04 crc kubenswrapper[4935]: I1201 20:14:04.517941 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5d75f767dc-nxgr9_d4211a91-6935-4af5-8eb9-a3941c9b5293/init/0.log" Dec 01 20:14:04 crc kubenswrapper[4935]: I1201 20:14:04.613069 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-cmkcb_763abe02-1bdf-4403-a139-a15aba539519/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:04 crc kubenswrapper[4935]: I1201 20:14:04.645908 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5d75f767dc-nxgr9_d4211a91-6935-4af5-8eb9-a3941c9b5293/dnsmasq-dns/0.log" Dec 01 20:14:04 crc kubenswrapper[4935]: I1201 20:14:04.891353 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_c06fe677-8ca0-4f63-ac6a-b83590981bca/glance-httpd/0.log" Dec 01 20:14:04 crc kubenswrapper[4935]: I1201 20:14:04.893093 4935 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_c06fe677-8ca0-4f63-ac6a-b83590981bca/glance-log/0.log" Dec 01 20:14:05 crc kubenswrapper[4935]: I1201 20:14:05.145804 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3b9ac927-63ca-47c1-b78c-b93dae9abdb8/glance-httpd/0.log" Dec 01 20:14:05 crc kubenswrapper[4935]: I1201 20:14:05.156505 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3b9ac927-63ca-47c1-b78c-b93dae9abdb8/glance-log/0.log" Dec 01 20:14:05 crc kubenswrapper[4935]: I1201 20:14:05.874000 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-btfmc_378c4e86-bba0-4b39-8708-c925caf2756b/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:05 crc kubenswrapper[4935]: I1201 20:14:05.916717 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-6bfbdbbb99-fln9r_b5fc477d-3538-47ad-ae75-b9053d6eb06f/heat-engine/0.log" Dec 01 20:14:06 crc kubenswrapper[4935]: I1201 20:14:06.149085 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-gcqp7_4602269a-c17c-4ef2-9484-645469b97214/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:06 crc kubenswrapper[4935]: I1201 20:14:06.179018 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-659767c5f-48fgm_557d94df-ef5e-4a1e-9f9e-df761e2d6cb2/heat-api/0.log" Dec 01 20:14:06 crc kubenswrapper[4935]: I1201 20:14:06.458870 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29410261-xl5dh_1c5079eb-78df-422b-85e6-0f0cfed5f451/keystone-cron/0.log" Dec 01 20:14:06 crc kubenswrapper[4935]: I1201 20:14:06.641866 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-796bbb8f66-6s68g_4695e9f4-6780-4105-a952-7e00df3e9f05/heat-cfnapi/0.log" Dec 01 20:14:06 crc kubenswrapper[4935]: I1201 20:14:06.709415 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29410321-6wpck_387582b3-2696-4540-9866-138a6ea4394e/keystone-cron/0.log" Dec 01 20:14:06 crc kubenswrapper[4935]: I1201 20:14:06.891511 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6fb8649598-lsccp_3de16100-5a0b-457e-b23a-6efec4cca38e/keystone-api/0.log" Dec 01 20:14:07 crc kubenswrapper[4935]: I1201 20:14:07.102178 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_4b4e129a-da45-447c-af5d-6370bfee1066/kube-state-metrics/0.log" Dec 01 20:14:07 crc kubenswrapper[4935]: I1201 20:14:07.206171 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-szrdm_666d7f3a-f7f0-456e-b027-e68d2d8b1dbd/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:07 crc kubenswrapper[4935]: I1201 20:14:07.241754 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_logging-edpm-deployment-openstack-edpm-ipam-wh9xp_bb830086-fa18-46e6-877c-ab2bfbaea88b/logging-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:07 crc kubenswrapper[4935]: I1201 20:14:07.500980 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mysqld-exporter-0_6732fd86-efb5-4fec-b8d8-36ed6dfb12eb/mysqld-exporter/0.log" Dec 01 20:14:07 crc kubenswrapper[4935]: I1201 20:14:07.988547 4935 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-6f7cd766b5-766jz_e2e120ac-43cf-4de1-ba58-b0418d6ba9dd/neutron-httpd/0.log" Dec 01 20:14:08 crc kubenswrapper[4935]: I1201 20:14:08.001055 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-vclh5_bc10e14c-8acc-448c-addc-745b67376f6e/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:08 crc kubenswrapper[4935]: I1201 20:14:08.036398 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f7cd766b5-766jz_e2e120ac-43cf-4de1-ba58-b0418d6ba9dd/neutron-api/0.log" Dec 01 20:14:08 crc kubenswrapper[4935]: I1201 20:14:08.961916 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_af39aa76-e3fe-4e4d-9d7a-75c43da4f301/nova-cell0-conductor-conductor/0.log" Dec 01 20:14:09 crc kubenswrapper[4935]: I1201 20:14:09.016138 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e84dfe6a-b9aa-406a-9b1a-895d826cd5a7/nova-api-log/0.log" Dec 01 20:14:09 crc kubenswrapper[4935]: I1201 20:14:09.248047 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_9af914fc-37f4-4830-9b72-aadeba875772/nova-cell1-conductor-conductor/0.log" Dec 01 20:14:09 crc kubenswrapper[4935]: I1201 20:14:09.421372 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_ff66b34c-0f33-4ac3-a71d-0470cd0b8517/nova-cell1-novncproxy-novncproxy/0.log" Dec 01 20:14:09 crc kubenswrapper[4935]: I1201 20:14:09.452462 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e84dfe6a-b9aa-406a-9b1a-895d826cd5a7/nova-api-api/0.log" Dec 01 20:14:09 crc kubenswrapper[4935]: I1201 20:14:09.613163 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-9fzcn_32e713b7-2006-4964-8c35-9b884a10c3d3/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:09 crc kubenswrapper[4935]: I1201 20:14:09.774947 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_34d0ec26-0662-497d-9cdc-278de9d991f4/nova-metadata-log/0.log" Dec 01 20:14:10 crc kubenswrapper[4935]: I1201 20:14:10.161523 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_f3f52b8c-640f-40b5-bb76-44c4387d6181/nova-scheduler-scheduler/0.log" Dec 01 20:14:10 crc kubenswrapper[4935]: I1201 20:14:10.230496 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_88d842df-da24-4955-aae0-e6125a01ed0b/mysql-bootstrap/0.log" Dec 01 20:14:10 crc kubenswrapper[4935]: I1201 20:14:10.459221 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_88d842df-da24-4955-aae0-e6125a01ed0b/galera/0.log" Dec 01 20:14:10 crc kubenswrapper[4935]: I1201 20:14:10.651081 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_88d842df-da24-4955-aae0-e6125a01ed0b/mysql-bootstrap/0.log" Dec 01 20:14:10 crc kubenswrapper[4935]: I1201 20:14:10.898262 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_a20a342c-d5f0-4a57-b485-5e8a122a6034/mysql-bootstrap/0.log" Dec 01 20:14:11 crc kubenswrapper[4935]: I1201 20:14:11.325475 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_a20a342c-d5f0-4a57-b485-5e8a122a6034/mysql-bootstrap/0.log" Dec 01 20:14:11 
crc kubenswrapper[4935]: I1201 20:14:11.374047 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_a20a342c-d5f0-4a57-b485-5e8a122a6034/galera/0.log" Dec 01 20:14:11 crc kubenswrapper[4935]: I1201 20:14:11.563940 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_44f0cb67-763e-4db1-b920-6331dfb40ba3/openstackclient/0.log" Dec 01 20:14:11 crc kubenswrapper[4935]: I1201 20:14:11.619751 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-7twr7_3798fbe5-306b-43f9-8f1f-ddc928996f88/ovn-controller/0.log" Dec 01 20:14:11 crc kubenswrapper[4935]: I1201 20:14:11.906140 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-wh57b_d99a6072-ecb2-469f-a2e4-8fbb1c49a0e7/openstack-network-exporter/0.log" Dec 01 20:14:12 crc kubenswrapper[4935]: I1201 20:14:12.133965 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zxbb9_f188ef33-496a-425c-87a1-54d67d7b42b5/ovsdb-server-init/0.log" Dec 01 20:14:12 crc kubenswrapper[4935]: I1201 20:14:12.382053 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_34d0ec26-0662-497d-9cdc-278de9d991f4/nova-metadata-metadata/0.log" Dec 01 20:14:12 crc kubenswrapper[4935]: I1201 20:14:12.387904 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zxbb9_f188ef33-496a-425c-87a1-54d67d7b42b5/ovs-vswitchd/0.log" Dec 01 20:14:12 crc kubenswrapper[4935]: I1201 20:14:12.390094 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zxbb9_f188ef33-496a-425c-87a1-54d67d7b42b5/ovsdb-server-init/0.log" Dec 01 20:14:12 crc kubenswrapper[4935]: I1201 20:14:12.406487 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zxbb9_f188ef33-496a-425c-87a1-54d67d7b42b5/ovsdb-server/0.log" Dec 01 20:14:12 crc kubenswrapper[4935]: I1201 20:14:12.689850 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a8b4a63f-27e7-4af7-897a-204468754716/openstack-network-exporter/0.log" Dec 01 20:14:12 crc kubenswrapper[4935]: I1201 20:14:12.693161 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-ftdr5_c0020572-52c6-4df3-8074-935cd16a074e/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:12 crc kubenswrapper[4935]: I1201 20:14:12.986964 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_096839ae-3e36-4242-bfbd-e19bf1ada9f2/openstack-network-exporter/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.002048 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a8b4a63f-27e7-4af7-897a-204468754716/ovn-northd/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.056763 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_096839ae-3e36-4242-bfbd-e19bf1ada9f2/ovsdbserver-nb/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.202581 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_ead52c85-6fd1-4ba0-9d5d-09955ce5b967/openstack-network-exporter/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.318619 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_ead52c85-6fd1-4ba0-9d5d-09955ce5b967/ovsdbserver-sb/0.log" Dec 01 20:14:13 crc 
kubenswrapper[4935]: I1201 20:14:13.608375 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5d64f494d8-2clmq_954e6aa6-2067-4489-83e8-390033553c3e/placement-api/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.623753 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_236b4030-ceae-4159-b2c9-beb3b4eca661/init-config-reloader/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.671126 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5d64f494d8-2clmq_954e6aa6-2067-4489-83e8-390033553c3e/placement-log/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.888969 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_236b4030-ceae-4159-b2c9-beb3b4eca661/config-reloader/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.919033 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_236b4030-ceae-4159-b2c9-beb3b4eca661/prometheus/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.919400 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_236b4030-ceae-4159-b2c9-beb3b4eca661/init-config-reloader/0.log" Dec 01 20:14:13 crc kubenswrapper[4935]: I1201 20:14:13.976776 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_236b4030-ceae-4159-b2c9-beb3b4eca661/thanos-sidecar/0.log" Dec 01 20:14:14 crc kubenswrapper[4935]: I1201 20:14:14.159700 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4ba77f4b-156c-4d2e-9335-dab14bf1dcb3/setup-container/0.log" Dec 01 20:14:14 crc kubenswrapper[4935]: I1201 20:14:14.464872 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4ba77f4b-156c-4d2e-9335-dab14bf1dcb3/setup-container/0.log" Dec 01 20:14:14 crc kubenswrapper[4935]: I1201 20:14:14.520797 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4ba77f4b-156c-4d2e-9335-dab14bf1dcb3/rabbitmq/0.log" Dec 01 20:14:14 crc kubenswrapper[4935]: I1201 20:14:14.680503 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_428c8cdb-5fa3-4a5e-b249-1bb3713220a4/setup-container/0.log" Dec 01 20:14:15 crc kubenswrapper[4935]: I1201 20:14:15.097242 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_428c8cdb-5fa3-4a5e-b249-1bb3713220a4/rabbitmq/0.log" Dec 01 20:14:15 crc kubenswrapper[4935]: I1201 20:14:15.122790 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-286n9_8c994dd9-89ff-43a4-b424-65b8a2bfb67d/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:15 crc kubenswrapper[4935]: I1201 20:14:15.133463 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_428c8cdb-5fa3-4a5e-b249-1bb3713220a4/setup-container/0.log" Dec 01 20:14:15 crc kubenswrapper[4935]: I1201 20:14:15.362797 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-n5wm2_a6b95e39-a3e1-4e74-9e30-6c29a6aa8096/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:15 crc kubenswrapper[4935]: I1201 20:14:15.436290 4935 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-jhpvl_499c1a41-8227-4cf9-8c15-99fd4a46f013/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:15 crc kubenswrapper[4935]: I1201 20:14:15.659942 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-6spvq_c13ec0c0-24c3-463c-b7c3-fcbf4bfbacd9/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:15 crc kubenswrapper[4935]: I1201 20:14:15.719193 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-h52hn_1b9b889f-4f7c-49c9-9bfa-cb42c62d0fa7/ssh-known-hosts-edpm-deployment/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.075581 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6b89d75d8c-8d6z5_d97b7792-f596-4358-8b02-1ae1368ac68d/proxy-server/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.152556 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-2v2t8_42f244f1-1b31-4831-8f12-f95ef0199c7c/swift-ring-rebalance/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.213337 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6b89d75d8c-8d6z5_d97b7792-f596-4358-8b02-1ae1368ac68d/proxy-httpd/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.393741 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/account-auditor/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.410851 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/account-reaper/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.585236 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/container-auditor/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.604441 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/account-replicator/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.607516 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/account-server/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.737138 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/container-replicator/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.816974 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/container-updater/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.845443 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/container-server/0.log" Dec 01 20:14:16 crc kubenswrapper[4935]: I1201 20:14:16.879614 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/object-auditor/0.log" Dec 01 20:14:17 crc kubenswrapper[4935]: I1201 20:14:17.036671 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/object-expirer/0.log" Dec 01 20:14:17 crc kubenswrapper[4935]: 
I1201 20:14:17.057453 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/object-server/0.log" Dec 01 20:14:17 crc kubenswrapper[4935]: I1201 20:14:17.080320 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/object-replicator/0.log" Dec 01 20:14:17 crc kubenswrapper[4935]: I1201 20:14:17.131627 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/object-updater/0.log" Dec 01 20:14:17 crc kubenswrapper[4935]: I1201 20:14:17.292860 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/swift-recon-cron/0.log" Dec 01 20:14:17 crc kubenswrapper[4935]: I1201 20:14:17.308402 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e0ee2844-1713-4b15-81f5-138cbc14fe03/rsync/0.log" Dec 01 20:14:17 crc kubenswrapper[4935]: I1201 20:14:17.447488 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-8dkdz_bc25f29a-826c-4823-95c8-1bba009e771f/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:17 crc kubenswrapper[4935]: I1201 20:14:17.641465 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-power-monitoring-edpm-deployment-openstack-edpm-lcz62_008102c1-54c3-4a58-8fec-a021793e2839/telemetry-power-monitoring-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:17 crc kubenswrapper[4935]: I1201 20:14:17.851677 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_69e5db88-4c8e-4e1a-926f-285b3f5312d1/test-operator-logs-container/0.log" Dec 01 20:14:18 crc kubenswrapper[4935]: I1201 20:14:18.121660 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-jjdnj_2fb18d0d-5377-4c16-86bb-8265ddf71223/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:14:18 crc kubenswrapper[4935]: I1201 20:14:18.238308 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_ed5fb4cf-a415-4429-af67-924e3f70cb3d/tempest-tests-tempest-tests-runner/0.log" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.334590 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hsdbw"] Dec 01 20:14:21 crc kubenswrapper[4935]: E1201 20:14:21.335785 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab4cd373-c3a4-4954-966c-b66c11361ef1" containerName="container-00" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.335802 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab4cd373-c3a4-4954-966c-b66c11361ef1" containerName="container-00" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.336081 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab4cd373-c3a4-4954-966c-b66c11361ef1" containerName="container-00" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.338280 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.458803 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-catalog-content\") pod \"certified-operators-hsdbw\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.459279 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzgnw\" (UniqueName: \"kubernetes.io/projected/3e4e71fc-3265-4929-a37d-219bf83d02cb-kube-api-access-kzgnw\") pod \"certified-operators-hsdbw\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.459532 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-utilities\") pod \"certified-operators-hsdbw\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.460422 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsdbw"] Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.561482 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-catalog-content\") pod \"certified-operators-hsdbw\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.561591 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzgnw\" (UniqueName: \"kubernetes.io/projected/3e4e71fc-3265-4929-a37d-219bf83d02cb-kube-api-access-kzgnw\") pod \"certified-operators-hsdbw\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.561730 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-utilities\") pod \"certified-operators-hsdbw\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.563812 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-catalog-content\") pod \"certified-operators-hsdbw\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.566047 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-utilities\") pod \"certified-operators-hsdbw\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.610003 4935 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kzgnw\" (UniqueName: \"kubernetes.io/projected/3e4e71fc-3265-4929-a37d-219bf83d02cb-kube-api-access-kzgnw\") pod \"certified-operators-hsdbw\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:21 crc kubenswrapper[4935]: I1201 20:14:21.660677 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:22 crc kubenswrapper[4935]: I1201 20:14:22.548634 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsdbw"] Dec 01 20:14:23 crc kubenswrapper[4935]: I1201 20:14:23.436020 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdbw" event={"ID":"3e4e71fc-3265-4929-a37d-219bf83d02cb","Type":"ContainerStarted","Data":"363ec8185f646794c7e5206943e88c87a5188ba5d292ae1fcc51d591b48cc3dd"} Dec 01 20:14:24 crc kubenswrapper[4935]: I1201 20:14:24.447034 4935 generic.go:334] "Generic (PLEG): container finished" podID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerID="681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26" exitCode=0 Dec 01 20:14:24 crc kubenswrapper[4935]: I1201 20:14:24.447104 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdbw" event={"ID":"3e4e71fc-3265-4929-a37d-219bf83d02cb","Type":"ContainerDied","Data":"681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26"} Dec 01 20:14:26 crc kubenswrapper[4935]: I1201 20:14:26.421856 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_6ad10b89-b196-46ba-8b53-a10f8b2a5310/memcached/0.log" Dec 01 20:14:26 crc kubenswrapper[4935]: I1201 20:14:26.479920 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdbw" event={"ID":"3e4e71fc-3265-4929-a37d-219bf83d02cb","Type":"ContainerStarted","Data":"05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162"} Dec 01 20:14:27 crc kubenswrapper[4935]: I1201 20:14:27.491601 4935 generic.go:334] "Generic (PLEG): container finished" podID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerID="05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162" exitCode=0 Dec 01 20:14:27 crc kubenswrapper[4935]: I1201 20:14:27.491751 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdbw" event={"ID":"3e4e71fc-3265-4929-a37d-219bf83d02cb","Type":"ContainerDied","Data":"05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162"} Dec 01 20:14:28 crc kubenswrapper[4935]: I1201 20:14:28.503510 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdbw" event={"ID":"3e4e71fc-3265-4929-a37d-219bf83d02cb","Type":"ContainerStarted","Data":"63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29"} Dec 01 20:14:28 crc kubenswrapper[4935]: I1201 20:14:28.532055 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hsdbw" podStartSLOduration=3.921252675 podStartE2EDuration="7.532032351s" podCreationTimestamp="2025-12-01 20:14:21 +0000 UTC" firstStartedPulling="2025-12-01 20:14:24.44874262 +0000 UTC m=+6278.470371879" lastFinishedPulling="2025-12-01 20:14:28.059522296 +0000 UTC m=+6282.081151555" observedRunningTime="2025-12-01 20:14:28.526228611 +0000 UTC m=+6282.547857880" 
watchObservedRunningTime="2025-12-01 20:14:28.532032351 +0000 UTC m=+6282.553661610" Dec 01 20:14:31 crc kubenswrapper[4935]: I1201 20:14:31.661179 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:31 crc kubenswrapper[4935]: I1201 20:14:31.661772 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:31 crc kubenswrapper[4935]: I1201 20:14:31.742641 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:41 crc kubenswrapper[4935]: I1201 20:14:41.718881 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:41 crc kubenswrapper[4935]: I1201 20:14:41.778264 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsdbw"] Dec 01 20:14:42 crc kubenswrapper[4935]: I1201 20:14:42.654197 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hsdbw" podUID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerName="registry-server" containerID="cri-o://63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29" gracePeriod=2 Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.224895 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.286876 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-utilities\") pod \"3e4e71fc-3265-4929-a37d-219bf83d02cb\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.287250 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzgnw\" (UniqueName: \"kubernetes.io/projected/3e4e71fc-3265-4929-a37d-219bf83d02cb-kube-api-access-kzgnw\") pod \"3e4e71fc-3265-4929-a37d-219bf83d02cb\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.287289 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-catalog-content\") pod \"3e4e71fc-3265-4929-a37d-219bf83d02cb\" (UID: \"3e4e71fc-3265-4929-a37d-219bf83d02cb\") " Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.290714 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-utilities" (OuterVolumeSpecName: "utilities") pod "3e4e71fc-3265-4929-a37d-219bf83d02cb" (UID: "3e4e71fc-3265-4929-a37d-219bf83d02cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.297602 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e4e71fc-3265-4929-a37d-219bf83d02cb-kube-api-access-kzgnw" (OuterVolumeSpecName: "kube-api-access-kzgnw") pod "3e4e71fc-3265-4929-a37d-219bf83d02cb" (UID: "3e4e71fc-3265-4929-a37d-219bf83d02cb"). InnerVolumeSpecName "kube-api-access-kzgnw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.345287 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e4e71fc-3265-4929-a37d-219bf83d02cb" (UID: "3e4e71fc-3265-4929-a37d-219bf83d02cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.389803 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.389841 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzgnw\" (UniqueName: \"kubernetes.io/projected/3e4e71fc-3265-4929-a37d-219bf83d02cb-kube-api-access-kzgnw\") on node \"crc\" DevicePath \"\"" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.389852 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e4e71fc-3265-4929-a37d-219bf83d02cb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.667656 4935 generic.go:334] "Generic (PLEG): container finished" podID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerID="63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29" exitCode=0 Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.667712 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsdbw" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.667733 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdbw" event={"ID":"3e4e71fc-3265-4929-a37d-219bf83d02cb","Type":"ContainerDied","Data":"63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29"} Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.668200 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdbw" event={"ID":"3e4e71fc-3265-4929-a37d-219bf83d02cb","Type":"ContainerDied","Data":"363ec8185f646794c7e5206943e88c87a5188ba5d292ae1fcc51d591b48cc3dd"} Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.668221 4935 scope.go:117] "RemoveContainer" containerID="63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.728411 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsdbw"] Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.734753 4935 scope.go:117] "RemoveContainer" containerID="05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.741253 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hsdbw"] Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.764568 4935 scope.go:117] "RemoveContainer" containerID="681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.825867 4935 scope.go:117] "RemoveContainer" containerID="63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29" Dec 01 20:14:43 crc kubenswrapper[4935]: E1201 20:14:43.826986 4935 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29\": container with ID starting with 63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29 not found: ID does not exist" containerID="63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.827052 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29"} err="failed to get container status \"63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29\": rpc error: code = NotFound desc = could not find container \"63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29\": container with ID starting with 63a7a1dd1e32b764f8ad7ccef03c06258b7271a16cc899340441dd714755ef29 not found: ID does not exist" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.827095 4935 scope.go:117] "RemoveContainer" containerID="05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162" Dec 01 20:14:43 crc kubenswrapper[4935]: E1201 20:14:43.827534 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162\": container with ID starting with 05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162 not found: ID does not exist" containerID="05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.827575 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162"} err="failed to get container status \"05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162\": rpc error: code = NotFound desc = could not find container \"05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162\": container with ID starting with 05f6a3929bb8c792b0344f8f00954d92525da9451223bec93bc9ff581790f162 not found: ID does not exist" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.827601 4935 scope.go:117] "RemoveContainer" containerID="681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26" Dec 01 20:14:43 crc kubenswrapper[4935]: E1201 20:14:43.827832 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26\": container with ID starting with 681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26 not found: ID does not exist" containerID="681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26" Dec 01 20:14:43 crc kubenswrapper[4935]: I1201 20:14:43.827861 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26"} err="failed to get container status \"681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26\": rpc error: code = NotFound desc = could not find container \"681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26\": container with ID starting with 681aa28f2387dea0ab92c83476d20522446e0ec7c334fc27eccc9be785ff7c26 not found: ID does not exist" Dec 01 20:14:44 crc kubenswrapper[4935]: I1201 20:14:44.524027 4935 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="3e4e71fc-3265-4929-a37d-219bf83d02cb" path="/var/lib/kubelet/pods/3e4e71fc-3265-4929-a37d-219bf83d02cb/volumes" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.030104 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj_a7adc7fa-0b6a-4433-8e11-53e6ba203427/util/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.250511 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj_a7adc7fa-0b6a-4433-8e11-53e6ba203427/util/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.257769 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj_a7adc7fa-0b6a-4433-8e11-53e6ba203427/pull/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.258752 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj_a7adc7fa-0b6a-4433-8e11-53e6ba203427/pull/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.460302 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj_a7adc7fa-0b6a-4433-8e11-53e6ba203427/pull/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.493640 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj_a7adc7fa-0b6a-4433-8e11-53e6ba203427/extract/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.505522 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_382fdd6670e7606db8f4e43c61a7e88956c3b0e1b77b19d197db8629c5x6tqj_a7adc7fa-0b6a-4433-8e11-53e6ba203427/util/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.653537 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-4crq6_a66fb641-eb39-4326-a4cb-d4e006a57436/kube-rbac-proxy/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.747536 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-w2v9w_8a1ded04-5c24-467c-a51b-c0cfbe67ba4b/kube-rbac-proxy/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.821767 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-4crq6_a66fb641-eb39-4326-a4cb-d4e006a57436/manager/0.log" Dec 01 20:14:47 crc kubenswrapper[4935]: I1201 20:14:47.920564 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-w2v9w_8a1ded04-5c24-467c-a51b-c0cfbe67ba4b/manager/0.log" Dec 01 20:14:48 crc kubenswrapper[4935]: I1201 20:14:48.009718 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-7hmvv_90db9fa3-a008-4a95-910d-fd7b92f37dea/kube-rbac-proxy/0.log" Dec 01 20:14:48 crc kubenswrapper[4935]: I1201 20:14:48.046989 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-7hmvv_90db9fa3-a008-4a95-910d-fd7b92f37dea/manager/0.log" Dec 01 20:14:48 crc kubenswrapper[4935]: I1201 
20:14:48.295940 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-wwjxq_122ce04b-8536-401a-820f-fd1b9f04afcf/kube-rbac-proxy/0.log" Dec 01 20:14:48 crc kubenswrapper[4935]: I1201 20:14:48.398535 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-wwjxq_122ce04b-8536-401a-820f-fd1b9f04afcf/manager/0.log" Dec 01 20:14:48 crc kubenswrapper[4935]: I1201 20:14:48.436529 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-j7bwb_e3aa8650-ce39-4eb2-8cae-eb012347abb6/kube-rbac-proxy/0.log" Dec 01 20:14:48 crc kubenswrapper[4935]: I1201 20:14:48.616530 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-zx4xc_671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161/kube-rbac-proxy/0.log" Dec 01 20:14:48 crc kubenswrapper[4935]: I1201 20:14:48.620540 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-j7bwb_e3aa8650-ce39-4eb2-8cae-eb012347abb6/manager/0.log" Dec 01 20:14:48 crc kubenswrapper[4935]: I1201 20:14:48.705069 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-zx4xc_671c0c0c-b6c5-47aa-bf90-2ba7ec2a5161/manager/0.log" Dec 01 20:14:48 crc kubenswrapper[4935]: I1201 20:14:48.841846 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-k9kcp_38100ae6-51a8-4a49-87d1-704ea8b5a0bc/kube-rbac-proxy/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.006079 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-k9kcp_38100ae6-51a8-4a49-87d1-704ea8b5a0bc/manager/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.074616 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-vtzkd_d0336662-89bd-415e-8c22-2b05bf5dbf9f/kube-rbac-proxy/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.116401 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-vtzkd_d0336662-89bd-415e-8c22-2b05bf5dbf9f/manager/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.230997 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-vk8hm_8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6/kube-rbac-proxy/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.390648 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-vk8hm_8c32d7fe-2d91-47ac-b0ad-c0b0d5cab9f6/manager/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.459722 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-7ppgb_b5460053-e8df-4350-a4a4-ff44683d9f60/kube-rbac-proxy/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.488490 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-7ppgb_b5460053-e8df-4350-a4a4-ff44683d9f60/manager/0.log" Dec 01 20:14:49 crc 
kubenswrapper[4935]: I1201 20:14:49.658555 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-qth5v_d19dd9f6-38a2-4bdb-be7b-54184b15b7ab/kube-rbac-proxy/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.715000 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-qth5v_d19dd9f6-38a2-4bdb-be7b-54184b15b7ab/manager/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.800434 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-wj722_07d2f6f9-58fc-4c36-b2b8-ce0c48424c28/kube-rbac-proxy/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.920510 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-wj722_07d2f6f9-58fc-4c36-b2b8-ce0c48424c28/manager/0.log" Dec 01 20:14:49 crc kubenswrapper[4935]: I1201 20:14:49.999351 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-8xpwk_61654c85-dd73-48d3-9931-1ce7095e4f07/kube-rbac-proxy/0.log" Dec 01 20:14:50 crc kubenswrapper[4935]: I1201 20:14:50.062640 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-8xpwk_61654c85-dd73-48d3-9931-1ce7095e4f07/manager/0.log" Dec 01 20:14:50 crc kubenswrapper[4935]: I1201 20:14:50.132405 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-25tbb_114bfc93-038f-416e-8a85-f1697387b2e2/kube-rbac-proxy/0.log" Dec 01 20:14:50 crc kubenswrapper[4935]: I1201 20:14:50.215693 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-25tbb_114bfc93-038f-416e-8a85-f1697387b2e2/manager/0.log" Dec 01 20:14:50 crc kubenswrapper[4935]: I1201 20:14:50.300590 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw_fef2b5dc-a162-4b91-ada5-f6af85d8fe20/kube-rbac-proxy/0.log" Dec 01 20:14:50 crc kubenswrapper[4935]: I1201 20:14:50.784986 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4bmxkw_fef2b5dc-a162-4b91-ada5-f6af85d8fe20/manager/0.log" Dec 01 20:14:51 crc kubenswrapper[4935]: I1201 20:14:51.064106 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-868fc9df76-9f6cb_c4a7a2a7-3e74-4d52-a597-2c85b31e9bc4/operator/0.log" Dec 01 20:14:51 crc kubenswrapper[4935]: I1201 20:14:51.078565 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-nfg25_323c3307-d34a-4502-8a68-cef37832f834/registry-server/0.log" Dec 01 20:14:51 crc kubenswrapper[4935]: I1201 20:14:51.387323 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-wmr2q_0fa3cc8f-0a56-4bec-8afb-3fb3599fb222/kube-rbac-proxy/0.log" Dec 01 20:14:51 crc kubenswrapper[4935]: I1201 20:14:51.468271 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-wmr2q_0fa3cc8f-0a56-4bec-8afb-3fb3599fb222/manager/0.log" Dec 
01 20:14:51 crc kubenswrapper[4935]: I1201 20:14:51.652684 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kg6tq_50e604ea-ddfe-470b-bbbf-b65a5948d9d7/kube-rbac-proxy/0.log" Dec 01 20:14:51 crc kubenswrapper[4935]: I1201 20:14:51.815738 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kg6tq_50e604ea-ddfe-470b-bbbf-b65a5948d9d7/manager/0.log" Dec 01 20:14:51 crc kubenswrapper[4935]: I1201 20:14:51.885290 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-6wpv9_367ed696-aab2-40a9-bdd8-04b4a5e928d0/operator/0.log" Dec 01 20:14:52 crc kubenswrapper[4935]: I1201 20:14:52.010951 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-slbss_d834cbf1-7527-4530-94ca-a0188780da7d/kube-rbac-proxy/0.log" Dec 01 20:14:52 crc kubenswrapper[4935]: I1201 20:14:52.080279 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-slbss_d834cbf1-7527-4530-94ca-a0188780da7d/manager/0.log" Dec 01 20:14:52 crc kubenswrapper[4935]: I1201 20:14:52.194988 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-7445b68fd8-4tjzb_6d09f2a0-653e-417a-8fee-53935bc27816/kube-rbac-proxy/0.log" Dec 01 20:14:52 crc kubenswrapper[4935]: I1201 20:14:52.350784 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-96bb7f5d4-59p62_96a8d8bc-bf66-4fb7-a5bf-b84b22c2c325/manager/0.log" Dec 01 20:14:52 crc kubenswrapper[4935]: I1201 20:14:52.474426 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-mhqkw_e553c27c-e8f0-4617-a914-46c8b5cfc33b/kube-rbac-proxy/0.log" Dec 01 20:14:52 crc kubenswrapper[4935]: I1201 20:14:52.480849 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-mhqkw_e553c27c-e8f0-4617-a914-46c8b5cfc33b/manager/0.log" Dec 01 20:14:52 crc kubenswrapper[4935]: I1201 20:14:52.614824 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-7445b68fd8-4tjzb_6d09f2a0-653e-417a-8fee-53935bc27816/manager/0.log" Dec 01 20:14:52 crc kubenswrapper[4935]: I1201 20:14:52.705009 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-82j75_c5fb0811-5cb8-4bff-927c-99f4e08b8ae0/kube-rbac-proxy/0.log" Dec 01 20:14:52 crc kubenswrapper[4935]: I1201 20:14:52.709535 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-82j75_c5fb0811-5cb8-4bff-927c-99f4e08b8ae0/manager/0.log" Dec 01 20:14:54 crc kubenswrapper[4935]: I1201 20:14:54.346201 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:14:54 crc kubenswrapper[4935]: I1201 20:14:54.346608 4935 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.238070 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5"] Dec 01 20:15:00 crc kubenswrapper[4935]: E1201 20:15:00.239338 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerName="registry-server" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.239358 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerName="registry-server" Dec 01 20:15:00 crc kubenswrapper[4935]: E1201 20:15:00.239391 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerName="extract-utilities" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.239401 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerName="extract-utilities" Dec 01 20:15:00 crc kubenswrapper[4935]: E1201 20:15:00.239448 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerName="extract-content" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.239457 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerName="extract-content" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.239814 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e4e71fc-3265-4929-a37d-219bf83d02cb" containerName="registry-server" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.240925 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.243024 4935 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.243323 4935 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.255963 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5"] Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.322025 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/134c09cf-0a0f-4343-8207-1f7a56b13ded-secret-volume\") pod \"collect-profiles-29410335-99zw5\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.322111 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/134c09cf-0a0f-4343-8207-1f7a56b13ded-config-volume\") pod \"collect-profiles-29410335-99zw5\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.322499 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqxlj\" (UniqueName: \"kubernetes.io/projected/134c09cf-0a0f-4343-8207-1f7a56b13ded-kube-api-access-rqxlj\") pod \"collect-profiles-29410335-99zw5\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.425007 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqxlj\" (UniqueName: \"kubernetes.io/projected/134c09cf-0a0f-4343-8207-1f7a56b13ded-kube-api-access-rqxlj\") pod \"collect-profiles-29410335-99zw5\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.425097 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/134c09cf-0a0f-4343-8207-1f7a56b13ded-secret-volume\") pod \"collect-profiles-29410335-99zw5\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.425134 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/134c09cf-0a0f-4343-8207-1f7a56b13ded-config-volume\") pod \"collect-profiles-29410335-99zw5\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.426293 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/134c09cf-0a0f-4343-8207-1f7a56b13ded-config-volume\") pod 
\"collect-profiles-29410335-99zw5\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.440821 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/134c09cf-0a0f-4343-8207-1f7a56b13ded-secret-volume\") pod \"collect-profiles-29410335-99zw5\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.443129 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqxlj\" (UniqueName: \"kubernetes.io/projected/134c09cf-0a0f-4343-8207-1f7a56b13ded-kube-api-access-rqxlj\") pod \"collect-profiles-29410335-99zw5\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:00 crc kubenswrapper[4935]: I1201 20:15:00.568656 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:01 crc kubenswrapper[4935]: I1201 20:15:01.163939 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5"] Dec 01 20:15:01 crc kubenswrapper[4935]: I1201 20:15:01.870867 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" event={"ID":"134c09cf-0a0f-4343-8207-1f7a56b13ded","Type":"ContainerStarted","Data":"5135a96c51ac47263e9676784f1b88ebeca63dd9d985465d25d451f02f3dbc9d"} Dec 01 20:15:01 crc kubenswrapper[4935]: I1201 20:15:01.871396 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" event={"ID":"134c09cf-0a0f-4343-8207-1f7a56b13ded","Type":"ContainerStarted","Data":"08b2c739e6974ffde8f40972b9b8a1ebc8c1cb83f1cd179f3567167686574caf"} Dec 01 20:15:01 crc kubenswrapper[4935]: I1201 20:15:01.896541 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" podStartSLOduration=1.8965181580000001 podStartE2EDuration="1.896518158s" podCreationTimestamp="2025-12-01 20:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:15:01.887510089 +0000 UTC m=+6315.909139348" watchObservedRunningTime="2025-12-01 20:15:01.896518158 +0000 UTC m=+6315.918147417" Dec 01 20:15:02 crc kubenswrapper[4935]: I1201 20:15:02.890885 4935 generic.go:334] "Generic (PLEG): container finished" podID="134c09cf-0a0f-4343-8207-1f7a56b13ded" containerID="5135a96c51ac47263e9676784f1b88ebeca63dd9d985465d25d451f02f3dbc9d" exitCode=0 Dec 01 20:15:02 crc kubenswrapper[4935]: I1201 20:15:02.891461 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" event={"ID":"134c09cf-0a0f-4343-8207-1f7a56b13ded","Type":"ContainerDied","Data":"5135a96c51ac47263e9676784f1b88ebeca63dd9d985465d25d451f02f3dbc9d"} Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.348917 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.423075 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqxlj\" (UniqueName: \"kubernetes.io/projected/134c09cf-0a0f-4343-8207-1f7a56b13ded-kube-api-access-rqxlj\") pod \"134c09cf-0a0f-4343-8207-1f7a56b13ded\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.423158 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/134c09cf-0a0f-4343-8207-1f7a56b13ded-secret-volume\") pod \"134c09cf-0a0f-4343-8207-1f7a56b13ded\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.423446 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/134c09cf-0a0f-4343-8207-1f7a56b13ded-config-volume\") pod \"134c09cf-0a0f-4343-8207-1f7a56b13ded\" (UID: \"134c09cf-0a0f-4343-8207-1f7a56b13ded\") " Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.423934 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/134c09cf-0a0f-4343-8207-1f7a56b13ded-config-volume" (OuterVolumeSpecName: "config-volume") pod "134c09cf-0a0f-4343-8207-1f7a56b13ded" (UID: "134c09cf-0a0f-4343-8207-1f7a56b13ded"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.424070 4935 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/134c09cf-0a0f-4343-8207-1f7a56b13ded-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.429343 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/134c09cf-0a0f-4343-8207-1f7a56b13ded-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "134c09cf-0a0f-4343-8207-1f7a56b13ded" (UID: "134c09cf-0a0f-4343-8207-1f7a56b13ded"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.429566 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/134c09cf-0a0f-4343-8207-1f7a56b13ded-kube-api-access-rqxlj" (OuterVolumeSpecName: "kube-api-access-rqxlj") pod "134c09cf-0a0f-4343-8207-1f7a56b13ded" (UID: "134c09cf-0a0f-4343-8207-1f7a56b13ded"). InnerVolumeSpecName "kube-api-access-rqxlj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.526548 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqxlj\" (UniqueName: \"kubernetes.io/projected/134c09cf-0a0f-4343-8207-1f7a56b13ded-kube-api-access-rqxlj\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.526583 4935 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/134c09cf-0a0f-4343-8207-1f7a56b13ded-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.929617 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" event={"ID":"134c09cf-0a0f-4343-8207-1f7a56b13ded","Type":"ContainerDied","Data":"08b2c739e6974ffde8f40972b9b8a1ebc8c1cb83f1cd179f3567167686574caf"} Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.930000 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08b2c739e6974ffde8f40972b9b8a1ebc8c1cb83f1cd179f3567167686574caf" Dec 01 20:15:04 crc kubenswrapper[4935]: I1201 20:15:04.929670 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-99zw5" Dec 01 20:15:05 crc kubenswrapper[4935]: I1201 20:15:05.437778 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4"] Dec 01 20:15:05 crc kubenswrapper[4935]: I1201 20:15:05.449676 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-2rbm4"] Dec 01 20:15:06 crc kubenswrapper[4935]: I1201 20:15:06.522417 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c4417c7-b634-4c6f-8609-1777156a606a" path="/var/lib/kubelet/pods/2c4417c7-b634-4c6f-8609-1777156a606a/volumes" Dec 01 20:15:10 crc kubenswrapper[4935]: I1201 20:15:10.823126 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-29f7l_ddd2fe2c-4083-4793-81d8-20d7fc05fe3d/control-plane-machine-set-operator/0.log" Dec 01 20:15:11 crc kubenswrapper[4935]: I1201 20:15:11.009551 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r9pw7_77a79ecb-4a46-43f2-9187-7cd6fc3dc641/kube-rbac-proxy/0.log" Dec 01 20:15:11 crc kubenswrapper[4935]: I1201 20:15:11.038501 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r9pw7_77a79ecb-4a46-43f2-9187-7cd6fc3dc641/machine-api-operator/0.log" Dec 01 20:15:13 crc kubenswrapper[4935]: I1201 20:15:13.015840 4935 scope.go:117] "RemoveContainer" containerID="5752f3659847b12d6f45318d9f69a4b0dcb70bbd430206193d3e93c462efd247" Dec 01 20:15:24 crc kubenswrapper[4935]: I1201 20:15:24.007173 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-7hwdt_54039121-127f-4ce7-b6d9-3dff080bc6ae/cert-manager-controller/0.log" Dec 01 20:15:24 crc kubenswrapper[4935]: I1201 20:15:24.341394 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-s7jbr_4fba61c9-f590-402c-a1b1-2a861a4b1bb6/cert-manager-cainjector/0.log" Dec 01 20:15:24 crc kubenswrapper[4935]: I1201 20:15:24.346014 4935 patch_prober.go:28] 
interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:15:24 crc kubenswrapper[4935]: I1201 20:15:24.346601 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:15:24 crc kubenswrapper[4935]: I1201 20:15:24.425965 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-k9tw2_1ee1742c-1268-40ba-a472-ca0184dd5fae/cert-manager-webhook/0.log" Dec 01 20:15:37 crc kubenswrapper[4935]: I1201 20:15:37.076269 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-7ncpl_7b73393b-b0a5-4ec7-8854-b7a0d5cbe268/nmstate-console-plugin/0.log" Dec 01 20:15:37 crc kubenswrapper[4935]: I1201 20:15:37.280884 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-2vqsx_d347948e-508e-4aeb-b082-c2d0f48ebace/kube-rbac-proxy/0.log" Dec 01 20:15:37 crc kubenswrapper[4935]: I1201 20:15:37.300844 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-tjcvm_e2829c29-7d31-4124-87aa-e2eff8f2653c/nmstate-handler/0.log" Dec 01 20:15:37 crc kubenswrapper[4935]: I1201 20:15:37.354805 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-2vqsx_d347948e-508e-4aeb-b082-c2d0f48ebace/nmstate-metrics/0.log" Dec 01 20:15:37 crc kubenswrapper[4935]: I1201 20:15:37.470780 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-bcgrg_2d0137b6-1254-4d31-a0fe-f8dc12b012f1/nmstate-operator/0.log" Dec 01 20:15:37 crc kubenswrapper[4935]: I1201 20:15:37.576043 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-n6zfj_3a58c39c-42c3-472c-8b8c-725b44b7ae0e/nmstate-webhook/0.log" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.289731 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q7wcv"] Dec 01 20:15:43 crc kubenswrapper[4935]: E1201 20:15:43.290980 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134c09cf-0a0f-4343-8207-1f7a56b13ded" containerName="collect-profiles" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.290994 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="134c09cf-0a0f-4343-8207-1f7a56b13ded" containerName="collect-profiles" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.291235 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="134c09cf-0a0f-4343-8207-1f7a56b13ded" containerName="collect-profiles" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.293046 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.304674 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7wcv"] Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.398784 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-catalog-content\") pod \"redhat-marketplace-q7wcv\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.398999 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nphb\" (UniqueName: \"kubernetes.io/projected/e51a17ff-3335-49cb-aef0-a39021470de1-kube-api-access-8nphb\") pod \"redhat-marketplace-q7wcv\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.399386 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-utilities\") pod \"redhat-marketplace-q7wcv\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.502025 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nphb\" (UniqueName: \"kubernetes.io/projected/e51a17ff-3335-49cb-aef0-a39021470de1-kube-api-access-8nphb\") pod \"redhat-marketplace-q7wcv\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.502495 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-utilities\") pod \"redhat-marketplace-q7wcv\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.502835 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-catalog-content\") pod \"redhat-marketplace-q7wcv\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.502980 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-utilities\") pod \"redhat-marketplace-q7wcv\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.503326 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-catalog-content\") pod \"redhat-marketplace-q7wcv\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.527738 4935 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8nphb\" (UniqueName: \"kubernetes.io/projected/e51a17ff-3335-49cb-aef0-a39021470de1-kube-api-access-8nphb\") pod \"redhat-marketplace-q7wcv\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:43 crc kubenswrapper[4935]: I1201 20:15:43.625315 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:44 crc kubenswrapper[4935]: I1201 20:15:44.154335 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7wcv"] Dec 01 20:15:44 crc kubenswrapper[4935]: I1201 20:15:44.387089 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7wcv" event={"ID":"e51a17ff-3335-49cb-aef0-a39021470de1","Type":"ContainerStarted","Data":"d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271"} Dec 01 20:15:44 crc kubenswrapper[4935]: I1201 20:15:44.387470 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7wcv" event={"ID":"e51a17ff-3335-49cb-aef0-a39021470de1","Type":"ContainerStarted","Data":"47bde2781eb1aaf46c322b692123bcff69d66f470328163bb076244af2f60e28"} Dec 01 20:15:45 crc kubenswrapper[4935]: I1201 20:15:45.401664 4935 generic.go:334] "Generic (PLEG): container finished" podID="e51a17ff-3335-49cb-aef0-a39021470de1" containerID="d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271" exitCode=0 Dec 01 20:15:45 crc kubenswrapper[4935]: I1201 20:15:45.401769 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7wcv" event={"ID":"e51a17ff-3335-49cb-aef0-a39021470de1","Type":"ContainerDied","Data":"d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271"} Dec 01 20:15:45 crc kubenswrapper[4935]: I1201 20:15:45.404292 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:15:46 crc kubenswrapper[4935]: I1201 20:15:46.416138 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7wcv" event={"ID":"e51a17ff-3335-49cb-aef0-a39021470de1","Type":"ContainerStarted","Data":"a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c"} Dec 01 20:15:47 crc kubenswrapper[4935]: I1201 20:15:47.427475 4935 generic.go:334] "Generic (PLEG): container finished" podID="e51a17ff-3335-49cb-aef0-a39021470de1" containerID="a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c" exitCode=0 Dec 01 20:15:47 crc kubenswrapper[4935]: I1201 20:15:47.427586 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7wcv" event={"ID":"e51a17ff-3335-49cb-aef0-a39021470de1","Type":"ContainerDied","Data":"a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c"} Dec 01 20:15:50 crc kubenswrapper[4935]: I1201 20:15:50.503914 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7wcv" event={"ID":"e51a17ff-3335-49cb-aef0-a39021470de1","Type":"ContainerStarted","Data":"ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f"} Dec 01 20:15:50 crc kubenswrapper[4935]: I1201 20:15:50.556107 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q7wcv" podStartSLOduration=3.629026101 podStartE2EDuration="7.55608746s" podCreationTimestamp="2025-12-01 20:15:43 
+0000 UTC" firstStartedPulling="2025-12-01 20:15:45.404043194 +0000 UTC m=+6359.425672453" lastFinishedPulling="2025-12-01 20:15:49.331104553 +0000 UTC m=+6363.352733812" observedRunningTime="2025-12-01 20:15:50.536775712 +0000 UTC m=+6364.558405001" watchObservedRunningTime="2025-12-01 20:15:50.55608746 +0000 UTC m=+6364.577716719" Dec 01 20:15:52 crc kubenswrapper[4935]: I1201 20:15:52.039002 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-59df859d46-rd5lg_7c66dc44-1b72-4e23-9421-8f8495e7af3a/manager/0.log" Dec 01 20:15:52 crc kubenswrapper[4935]: I1201 20:15:52.039036 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-59df859d46-rd5lg_7c66dc44-1b72-4e23-9421-8f8495e7af3a/kube-rbac-proxy/0.log" Dec 01 20:15:53 crc kubenswrapper[4935]: I1201 20:15:53.797378 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:53 crc kubenswrapper[4935]: I1201 20:15:53.797693 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:53 crc kubenswrapper[4935]: I1201 20:15:53.860603 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.346358 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.346420 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.346468 4935 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.347467 4935 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b"} pod="openshift-machine-config-operator/machine-config-daemon-zznnp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.347536 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" containerID="cri-o://18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" gracePeriod=600 Dec 01 20:15:54 crc kubenswrapper[4935]: E1201 20:15:54.618979 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.838704 4935 generic.go:334] "Generic (PLEG): container finished" podID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" exitCode=0 Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.839821 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerDied","Data":"18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b"} Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.839867 4935 scope.go:117] "RemoveContainer" containerID="7accc577ff3ee35e634014a97efd720d6261296da14379225b5efcb18ad23775" Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.841206 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:15:54 crc kubenswrapper[4935]: E1201 20:15:54.841649 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.934896 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:54 crc kubenswrapper[4935]: I1201 20:15:54.996016 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7wcv"] Dec 01 20:15:56 crc kubenswrapper[4935]: I1201 20:15:56.857301 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q7wcv" podUID="e51a17ff-3335-49cb-aef0-a39021470de1" containerName="registry-server" containerID="cri-o://ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f" gracePeriod=2 Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.467867 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.592792 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-catalog-content\") pod \"e51a17ff-3335-49cb-aef0-a39021470de1\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.592912 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nphb\" (UniqueName: \"kubernetes.io/projected/e51a17ff-3335-49cb-aef0-a39021470de1-kube-api-access-8nphb\") pod \"e51a17ff-3335-49cb-aef0-a39021470de1\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.593061 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-utilities\") pod \"e51a17ff-3335-49cb-aef0-a39021470de1\" (UID: \"e51a17ff-3335-49cb-aef0-a39021470de1\") " Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.594707 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-utilities" (OuterVolumeSpecName: "utilities") pod "e51a17ff-3335-49cb-aef0-a39021470de1" (UID: "e51a17ff-3335-49cb-aef0-a39021470de1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.599513 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e51a17ff-3335-49cb-aef0-a39021470de1-kube-api-access-8nphb" (OuterVolumeSpecName: "kube-api-access-8nphb") pod "e51a17ff-3335-49cb-aef0-a39021470de1" (UID: "e51a17ff-3335-49cb-aef0-a39021470de1"). InnerVolumeSpecName "kube-api-access-8nphb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.610609 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e51a17ff-3335-49cb-aef0-a39021470de1" (UID: "e51a17ff-3335-49cb-aef0-a39021470de1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.696486 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nphb\" (UniqueName: \"kubernetes.io/projected/e51a17ff-3335-49cb-aef0-a39021470de1-kube-api-access-8nphb\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.696526 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.696539 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a17ff-3335-49cb-aef0-a39021470de1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.878434 4935 generic.go:334] "Generic (PLEG): container finished" podID="e51a17ff-3335-49cb-aef0-a39021470de1" containerID="ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f" exitCode=0 Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.878477 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7wcv" event={"ID":"e51a17ff-3335-49cb-aef0-a39021470de1","Type":"ContainerDied","Data":"ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f"} Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.878503 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7wcv" event={"ID":"e51a17ff-3335-49cb-aef0-a39021470de1","Type":"ContainerDied","Data":"47bde2781eb1aaf46c322b692123bcff69d66f470328163bb076244af2f60e28"} Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.878519 4935 scope.go:117] "RemoveContainer" containerID="ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.878530 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q7wcv" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.915303 4935 scope.go:117] "RemoveContainer" containerID="a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.925222 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7wcv"] Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.939778 4935 scope.go:117] "RemoveContainer" containerID="d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.940301 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7wcv"] Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.992332 4935 scope.go:117] "RemoveContainer" containerID="ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f" Dec 01 20:15:57 crc kubenswrapper[4935]: E1201 20:15:57.992931 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f\": container with ID starting with ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f not found: ID does not exist" containerID="ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.992969 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f"} err="failed to get container status \"ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f\": rpc error: code = NotFound desc = could not find container \"ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f\": container with ID starting with ea1854395ad822e73fa0cff4cf47b7be6cab55d50d43236a0458a4bf858eee4f not found: ID does not exist" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.992998 4935 scope.go:117] "RemoveContainer" containerID="a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c" Dec 01 20:15:57 crc kubenswrapper[4935]: E1201 20:15:57.993398 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c\": container with ID starting with a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c not found: ID does not exist" containerID="a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.993432 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c"} err="failed to get container status \"a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c\": rpc error: code = NotFound desc = could not find container \"a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c\": container with ID starting with a85545d3ae29732fc1db574278d4dbcae235fa2dc9860e2cf9380b4278be6f7c not found: ID does not exist" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.993452 4935 scope.go:117] "RemoveContainer" containerID="d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271" Dec 01 20:15:57 crc kubenswrapper[4935]: E1201 20:15:57.993666 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271\": container with ID starting with d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271 not found: ID does not exist" containerID="d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271" Dec 01 20:15:57 crc kubenswrapper[4935]: I1201 20:15:57.993692 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271"} err="failed to get container status \"d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271\": rpc error: code = NotFound desc = could not find container \"d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271\": container with ID starting with d9892d3050d14a085000ffca5d8e5166574e770650254106520edd01510fc271 not found: ID does not exist" Dec 01 20:15:58 crc kubenswrapper[4935]: I1201 20:15:58.523620 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e51a17ff-3335-49cb-aef0-a39021470de1" path="/var/lib/kubelet/pods/e51a17ff-3335-49cb-aef0-a39021470de1/volumes" Dec 01 20:16:07 crc kubenswrapper[4935]: I1201 20:16:07.805539 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_cluster-logging-operator-ff9846bd-92jqh_f8e2ad20-1223-4cd9-bfe7-72bbc774226f/cluster-logging-operator/0.log" Dec 01 20:16:07 crc kubenswrapper[4935]: I1201 20:16:07.981694 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_collector-qlgrt_b40b5d13-a124-4ee9-a16e-cb21e2fcd047/collector/0.log" Dec 01 20:16:08 crc kubenswrapper[4935]: I1201 20:16:08.085449 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-compactor-0_47cd7efb-bd80-437c-b921-03dc4d3ee011/loki-compactor/0.log" Dec 01 20:16:08 crc kubenswrapper[4935]: I1201 20:16:08.209003 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-distributor-76cc67bf56-rvtlb_4c7e5318-7492-4aee-9738-c02c693a1ccd/loki-distributor/0.log" Dec 01 20:16:08 crc kubenswrapper[4935]: I1201 20:16:08.298257 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-6bd679557b-fq7qm_e4a3ab20-1697-4acf-9d3c-14037e5a78bd/opa/0.log" Dec 01 20:16:08 crc kubenswrapper[4935]: I1201 20:16:08.304871 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-6bd679557b-fq7qm_e4a3ab20-1697-4acf-9d3c-14037e5a78bd/gateway/0.log" Dec 01 20:16:08 crc kubenswrapper[4935]: I1201 20:16:08.469769 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-6bd679557b-vs7f7_8b4b5c73-8c85-42ec-88f4-9b703996e4c7/gateway/0.log" Dec 01 20:16:08 crc kubenswrapper[4935]: I1201 20:16:08.472370 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-6bd679557b-vs7f7_8b4b5c73-8c85-42ec-88f4-9b703996e4c7/opa/0.log" Dec 01 20:16:08 crc kubenswrapper[4935]: I1201 20:16:08.508944 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:16:08 crc kubenswrapper[4935]: E1201 20:16:08.509396 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:16:08 crc kubenswrapper[4935]: I1201 20:16:08.870403 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-index-gateway-0_23e6d00c-6fc5-4564-b9a4-4357c10cc65e/loki-index-gateway/0.log" Dec 01 20:16:09 crc kubenswrapper[4935]: I1201 20:16:09.017213 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-ingester-0_cc008841-9147-4cd8-894b-e54127c2a4ab/loki-ingester/0.log" Dec 01 20:16:09 crc kubenswrapper[4935]: I1201 20:16:09.070587 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-querier-5895d59bb8-hkjzj_5bde6947-75e2-4f05-b403-f010444ce0b8/loki-querier/0.log" Dec 01 20:16:09 crc kubenswrapper[4935]: I1201 20:16:09.169733 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-query-frontend-84558f7c9f-jl8x2_7f489c6c-5824-4f46-8bda-7363d4b1d1e4/loki-query-frontend/0.log" Dec 01 20:16:19 crc kubenswrapper[4935]: I1201 20:16:19.508039 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:16:19 crc kubenswrapper[4935]: E1201 20:16:19.508964 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:16:23 crc kubenswrapper[4935]: I1201 20:16:23.374537 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-zhs5b_3c2a298c-be4d-4b96-82f2-78df48943e0e/kube-rbac-proxy/0.log" Dec 01 20:16:23 crc kubenswrapper[4935]: I1201 20:16:23.522076 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-zhs5b_3c2a298c-be4d-4b96-82f2-78df48943e0e/controller/0.log" Dec 01 20:16:23 crc kubenswrapper[4935]: I1201 20:16:23.553334 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-frr-files/0.log" Dec 01 20:16:23 crc kubenswrapper[4935]: I1201 20:16:23.832627 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-frr-files/0.log" Dec 01 20:16:23 crc kubenswrapper[4935]: I1201 20:16:23.842231 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-reloader/0.log" Dec 01 20:16:23 crc kubenswrapper[4935]: I1201 20:16:23.870722 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-reloader/0.log" Dec 01 20:16:23 crc kubenswrapper[4935]: I1201 20:16:23.878294 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-metrics/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.048169 4935 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-reloader/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.057602 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-frr-files/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.127643 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-metrics/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.150402 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-metrics/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.460231 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-frr-files/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.489572 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-reloader/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.510456 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/controller/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.533670 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/cp-metrics/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.697573 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/kube-rbac-proxy/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.758028 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/frr-metrics/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.780729 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/kube-rbac-proxy-frr/0.log" Dec 01 20:16:24 crc kubenswrapper[4935]: I1201 20:16:24.970722 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/reloader/0.log" Dec 01 20:16:25 crc kubenswrapper[4935]: I1201 20:16:25.062816 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-6st2l_0858f09c-2e20-4861-af53-c1df064a5c48/frr-k8s-webhook-server/0.log" Dec 01 20:16:25 crc kubenswrapper[4935]: I1201 20:16:25.285699 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-788fff765c-47cxx_6e01c9df-7ad5-47f7-82b8-3886841341a8/manager/0.log" Dec 01 20:16:25 crc kubenswrapper[4935]: I1201 20:16:25.536694 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7687bf949b-vrmk6_7195d61c-817c-43c7-8cc4-09ac712ba59f/webhook-server/0.log" Dec 01 20:16:25 crc kubenswrapper[4935]: I1201 20:16:25.611783 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fwmt4_0c91c45c-1d3c-458a-9725-fcf4529e5db1/kube-rbac-proxy/0.log" Dec 01 20:16:26 crc kubenswrapper[4935]: I1201 20:16:26.301312 4935 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-fwmt4_0c91c45c-1d3c-458a-9725-fcf4529e5db1/speaker/0.log" Dec 01 20:16:26 crc kubenswrapper[4935]: I1201 20:16:26.731880 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-5zlbd_65bd8eca-d900-4b1b-a859-bcbde52e4bea/frr/0.log" Dec 01 20:16:31 crc kubenswrapper[4935]: I1201 20:16:31.507849 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:16:31 crc kubenswrapper[4935]: E1201 20:16:31.508634 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:16:39 crc kubenswrapper[4935]: I1201 20:16:39.771229 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj_e1a58ea7-d1ba-46f8-aef1-784fd0b59622/util/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.005421 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj_e1a58ea7-d1ba-46f8-aef1-784fd0b59622/pull/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.038108 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj_e1a58ea7-d1ba-46f8-aef1-784fd0b59622/util/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.054973 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj_e1a58ea7-d1ba-46f8-aef1-784fd0b59622/pull/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.238728 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj_e1a58ea7-d1ba-46f8-aef1-784fd0b59622/extract/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.240830 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj_e1a58ea7-d1ba-46f8-aef1-784fd0b59622/util/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.241113 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb8pjkpj_e1a58ea7-d1ba-46f8-aef1-784fd0b59622/pull/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.404360 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj_f58dd92e-7424-4a8b-a842-54d631dffd17/util/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.638976 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj_f58dd92e-7424-4a8b-a842-54d631dffd17/pull/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.644603 4935 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj_f58dd92e-7424-4a8b-a842-54d631dffd17/util/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.703849 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj_f58dd92e-7424-4a8b-a842-54d631dffd17/pull/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.902168 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj_f58dd92e-7424-4a8b-a842-54d631dffd17/pull/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.923470 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj_f58dd92e-7424-4a8b-a842-54d631dffd17/util/0.log" Dec 01 20:16:40 crc kubenswrapper[4935]: I1201 20:16:40.958702 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f49clj_f58dd92e-7424-4a8b-a842-54d631dffd17/extract/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.160174 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk_725a954a-2f06-4951-bfe5-c4db016352ca/util/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.244289 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk_725a954a-2f06-4951-bfe5-c4db016352ca/util/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.304411 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk_725a954a-2f06-4951-bfe5-c4db016352ca/pull/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.337931 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk_725a954a-2f06-4951-bfe5-c4db016352ca/pull/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.473874 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk_725a954a-2f06-4951-bfe5-c4db016352ca/util/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.494339 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk_725a954a-2f06-4951-bfe5-c4db016352ca/pull/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.529343 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210b5fhk_725a954a-2f06-4951-bfe5-c4db016352ca/extract/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.661328 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796_da239c9b-9e88-43ab-8967-36662c93340c/util/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.855352 4935 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796_da239c9b-9e88-43ab-8967-36662c93340c/util/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.856663 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796_da239c9b-9e88-43ab-8967-36662c93340c/pull/0.log" Dec 01 20:16:41 crc kubenswrapper[4935]: I1201 20:16:41.896173 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796_da239c9b-9e88-43ab-8967-36662c93340c/pull/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.081106 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796_da239c9b-9e88-43ab-8967-36662c93340c/extract/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.094568 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796_da239c9b-9e88-43ab-8967-36662c93340c/util/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.110837 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fpk796_da239c9b-9e88-43ab-8967-36662c93340c/pull/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.267263 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9_d856ea4e-cbbe-41e2-9e51-15a2efa53b2b/util/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.466784 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9_d856ea4e-cbbe-41e2-9e51-15a2efa53b2b/util/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.469728 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9_d856ea4e-cbbe-41e2-9e51-15a2efa53b2b/pull/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.503072 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9_d856ea4e-cbbe-41e2-9e51-15a2efa53b2b/pull/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.509857 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:16:42 crc kubenswrapper[4935]: E1201 20:16:42.510317 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.701126 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9_d856ea4e-cbbe-41e2-9e51-15a2efa53b2b/util/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.711520 4935 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9_d856ea4e-cbbe-41e2-9e51-15a2efa53b2b/extract/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.732109 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zvfl9_d856ea4e-cbbe-41e2-9e51-15a2efa53b2b/pull/0.log" Dec 01 20:16:42 crc kubenswrapper[4935]: I1201 20:16:42.897703 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-b5fvn_e5ae1ef2-88df-40ec-bc28-1f7b17d03cad/extract-utilities/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.059228 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-b5fvn_e5ae1ef2-88df-40ec-bc28-1f7b17d03cad/extract-utilities/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.118184 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-b5fvn_e5ae1ef2-88df-40ec-bc28-1f7b17d03cad/extract-content/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.149837 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-b5fvn_e5ae1ef2-88df-40ec-bc28-1f7b17d03cad/extract-content/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.269097 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-b5fvn_e5ae1ef2-88df-40ec-bc28-1f7b17d03cad/extract-content/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.288914 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-b5fvn_e5ae1ef2-88df-40ec-bc28-1f7b17d03cad/extract-utilities/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.452203 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mx79s_f7824b0e-01a6-40fd-a645-510e6e4bb088/extract-utilities/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.627834 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mx79s_f7824b0e-01a6-40fd-a645-510e6e4bb088/extract-utilities/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.794081 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mx79s_f7824b0e-01a6-40fd-a645-510e6e4bb088/extract-content/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.812693 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mx79s_f7824b0e-01a6-40fd-a645-510e6e4bb088/extract-content/0.log" Dec 01 20:16:43 crc kubenswrapper[4935]: I1201 20:16:43.946031 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-b5fvn_e5ae1ef2-88df-40ec-bc28-1f7b17d03cad/registry-server/0.log" Dec 01 20:16:44 crc kubenswrapper[4935]: I1201 20:16:44.115091 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mx79s_f7824b0e-01a6-40fd-a645-510e6e4bb088/extract-utilities/0.log" Dec 01 20:16:44 crc kubenswrapper[4935]: I1201 20:16:44.138618 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mx79s_f7824b0e-01a6-40fd-a645-510e6e4bb088/extract-content/0.log" Dec 01 20:16:44 crc kubenswrapper[4935]: I1201 
20:16:44.249031 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-4fmtp_76d4e37a-f26a-4bb0-bbaf-91be51709278/marketplace-operator/0.log" Dec 01 20:16:44 crc kubenswrapper[4935]: I1201 20:16:44.444330 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l5vnc_e07502f2-97bd-468a-bd48-7d309cb9ee99/extract-utilities/0.log" Dec 01 20:16:44 crc kubenswrapper[4935]: I1201 20:16:44.604363 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l5vnc_e07502f2-97bd-468a-bd48-7d309cb9ee99/extract-content/0.log" Dec 01 20:16:44 crc kubenswrapper[4935]: I1201 20:16:44.640873 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l5vnc_e07502f2-97bd-468a-bd48-7d309cb9ee99/extract-utilities/0.log" Dec 01 20:16:44 crc kubenswrapper[4935]: I1201 20:16:44.690217 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l5vnc_e07502f2-97bd-468a-bd48-7d309cb9ee99/extract-content/0.log" Dec 01 20:16:44 crc kubenswrapper[4935]: I1201 20:16:44.850216 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l5vnc_e07502f2-97bd-468a-bd48-7d309cb9ee99/extract-content/0.log" Dec 01 20:16:44 crc kubenswrapper[4935]: I1201 20:16:44.940032 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l5vnc_e07502f2-97bd-468a-bd48-7d309cb9ee99/extract-utilities/0.log" Dec 01 20:16:45 crc kubenswrapper[4935]: I1201 20:16:45.138036 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hpm55_50067650-c466-473e-b59d-b0ca914e1cd1/extract-utilities/0.log" Dec 01 20:16:45 crc kubenswrapper[4935]: I1201 20:16:45.218963 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l5vnc_e07502f2-97bd-468a-bd48-7d309cb9ee99/registry-server/0.log" Dec 01 20:16:45 crc kubenswrapper[4935]: I1201 20:16:45.294627 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hpm55_50067650-c466-473e-b59d-b0ca914e1cd1/extract-content/0.log" Dec 01 20:16:45 crc kubenswrapper[4935]: I1201 20:16:45.341428 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hpm55_50067650-c466-473e-b59d-b0ca914e1cd1/extract-utilities/0.log" Dec 01 20:16:45 crc kubenswrapper[4935]: I1201 20:16:45.374075 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mx79s_f7824b0e-01a6-40fd-a645-510e6e4bb088/registry-server/0.log" Dec 01 20:16:45 crc kubenswrapper[4935]: I1201 20:16:45.400961 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hpm55_50067650-c466-473e-b59d-b0ca914e1cd1/extract-content/0.log" Dec 01 20:16:45 crc kubenswrapper[4935]: I1201 20:16:45.546403 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hpm55_50067650-c466-473e-b59d-b0ca914e1cd1/extract-utilities/0.log" Dec 01 20:16:45 crc kubenswrapper[4935]: I1201 20:16:45.556245 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hpm55_50067650-c466-473e-b59d-b0ca914e1cd1/extract-content/0.log" Dec 01 20:16:45 crc kubenswrapper[4935]: I1201 20:16:45.883707 4935 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-hpm55_50067650-c466-473e-b59d-b0ca914e1cd1/registry-server/0.log" Dec 01 20:16:53 crc kubenswrapper[4935]: I1201 20:16:53.508669 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:16:53 crc kubenswrapper[4935]: E1201 20:16:53.509751 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:16:57 crc kubenswrapper[4935]: I1201 20:16:57.942344 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-4fc9p_c4d2ca33-292e-45b0-b5b0-972516b76b0d/prometheus-operator/0.log" Dec 01 20:16:58 crc kubenswrapper[4935]: I1201 20:16:58.134010 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6df4c89596-b8w2c_133161d9-93f5-4437-bb96-28c2726db1ed/prometheus-operator-admission-webhook/0.log" Dec 01 20:16:58 crc kubenswrapper[4935]: I1201 20:16:58.173992 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6df4c89596-kfjt8_0fa7af9d-21c4-4521-a988-05c5043e7e51/prometheus-operator-admission-webhook/0.log" Dec 01 20:16:58 crc kubenswrapper[4935]: I1201 20:16:58.352286 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-mhznx_f228055b-0716-4766-a2b7-dabacf9de9ad/operator/0.log" Dec 01 20:16:58 crc kubenswrapper[4935]: I1201 20:16:58.384998 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-ui-dashboards-7d5fb4cbfb-mt5kg_202825a3-ae0e-443f-ac33-3ce527a1bbd3/observability-ui-dashboards/0.log" Dec 01 20:16:58 crc kubenswrapper[4935]: I1201 20:16:58.520896 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-htdg4_c1d7978e-7d35-4b9a-97f8-981562161cde/perses-operator/0.log" Dec 01 20:17:04 crc kubenswrapper[4935]: I1201 20:17:04.508917 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:17:04 crc kubenswrapper[4935]: E1201 20:17:04.510021 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:17:11 crc kubenswrapper[4935]: I1201 20:17:11.252003 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-59df859d46-rd5lg_7c66dc44-1b72-4e23-9421-8f8495e7af3a/kube-rbac-proxy/0.log" Dec 01 20:17:11 crc kubenswrapper[4935]: I1201 20:17:11.295756 4935 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-59df859d46-rd5lg_7c66dc44-1b72-4e23-9421-8f8495e7af3a/manager/0.log" Dec 01 20:17:17 crc kubenswrapper[4935]: I1201 20:17:17.508793 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:17:17 crc kubenswrapper[4935]: E1201 20:17:17.509844 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:17:24 crc kubenswrapper[4935]: E1201 20:17:24.078689 4935 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.65:33826->38.102.83.65:38587: write tcp 38.102.83.65:33826->38.102.83.65:38587: write: broken pipe Dec 01 20:17:29 crc kubenswrapper[4935]: I1201 20:17:29.508019 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:17:29 crc kubenswrapper[4935]: E1201 20:17:29.508873 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:17:40 crc kubenswrapper[4935]: I1201 20:17:40.508596 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:17:40 crc kubenswrapper[4935]: E1201 20:17:40.510809 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:17:51 crc kubenswrapper[4935]: I1201 20:17:51.509353 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:17:51 crc kubenswrapper[4935]: E1201 20:17:51.510453 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:18:04 crc kubenswrapper[4935]: I1201 20:18:04.507955 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:18:04 crc kubenswrapper[4935]: E1201 20:18:04.508690 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:18:17 crc kubenswrapper[4935]: I1201 20:18:17.508052 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:18:17 crc kubenswrapper[4935]: E1201 20:18:17.509022 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:18:31 crc kubenswrapper[4935]: I1201 20:18:31.508352 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:18:31 crc kubenswrapper[4935]: E1201 20:18:31.509546 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:18:45 crc kubenswrapper[4935]: I1201 20:18:45.509196 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:18:45 crc kubenswrapper[4935]: E1201 20:18:45.510315 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:19:00 crc kubenswrapper[4935]: I1201 20:19:00.508280 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:19:00 crc kubenswrapper[4935]: E1201 20:19:00.509235 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:19:08 crc kubenswrapper[4935]: I1201 20:19:08.047287 4935 generic.go:334] "Generic (PLEG): container finished" podID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" containerID="bfdced1dd3c2098cde224aafb4b4579cf4bdfe5c6340b9b2019be146876dcfeb" exitCode=0 Dec 01 20:19:08 crc kubenswrapper[4935]: I1201 20:19:08.047380 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-rhfwr/must-gather-sxcst" event={"ID":"d0b1f897-ab76-4bf0-888c-748a3dc52c96","Type":"ContainerDied","Data":"bfdced1dd3c2098cde224aafb4b4579cf4bdfe5c6340b9b2019be146876dcfeb"} Dec 01 20:19:08 crc kubenswrapper[4935]: I1201 20:19:08.048778 4935 
scope.go:117] "RemoveContainer" containerID="bfdced1dd3c2098cde224aafb4b4579cf4bdfe5c6340b9b2019be146876dcfeb" Dec 01 20:19:08 crc kubenswrapper[4935]: I1201 20:19:08.963615 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rhfwr_must-gather-sxcst_d0b1f897-ab76-4bf0-888c-748a3dc52c96/gather/0.log" Dec 01 20:19:11 crc kubenswrapper[4935]: I1201 20:19:11.508874 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:19:11 crc kubenswrapper[4935]: E1201 20:19:11.509689 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:19:17 crc kubenswrapper[4935]: I1201 20:19:17.734996 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-rhfwr/must-gather-sxcst"] Dec 01 20:19:17 crc kubenswrapper[4935]: I1201 20:19:17.735971 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-rhfwr/must-gather-sxcst" podUID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" containerName="copy" containerID="cri-o://225c2b85640ad2766db30fa18036c1aa361ca974decd24b656cc64dfab2d3d13" gracePeriod=2 Dec 01 20:19:17 crc kubenswrapper[4935]: I1201 20:19:17.749620 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-rhfwr/must-gather-sxcst"] Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.193551 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rhfwr_must-gather-sxcst_d0b1f897-ab76-4bf0-888c-748a3dc52c96/copy/0.log" Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.194847 4935 generic.go:334] "Generic (PLEG): container finished" podID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" containerID="225c2b85640ad2766db30fa18036c1aa361ca974decd24b656cc64dfab2d3d13" exitCode=143 Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.194910 4935 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2f64e3e6968b91e4e568a2128e607ffeb6c56ce79118e8b33f8b1c1cf97b441" Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.272559 4935 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-rhfwr_must-gather-sxcst_d0b1f897-ab76-4bf0-888c-748a3dc52c96/copy/0.log" Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.273038 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.393797 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d0b1f897-ab76-4bf0-888c-748a3dc52c96-must-gather-output\") pod \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\" (UID: \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\") " Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.394107 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8vsw\" (UniqueName: \"kubernetes.io/projected/d0b1f897-ab76-4bf0-888c-748a3dc52c96-kube-api-access-r8vsw\") pod \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\" (UID: \"d0b1f897-ab76-4bf0-888c-748a3dc52c96\") " Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.421264 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0b1f897-ab76-4bf0-888c-748a3dc52c96-kube-api-access-r8vsw" (OuterVolumeSpecName: "kube-api-access-r8vsw") pod "d0b1f897-ab76-4bf0-888c-748a3dc52c96" (UID: "d0b1f897-ab76-4bf0-888c-748a3dc52c96"). InnerVolumeSpecName "kube-api-access-r8vsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.497950 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8vsw\" (UniqueName: \"kubernetes.io/projected/d0b1f897-ab76-4bf0-888c-748a3dc52c96-kube-api-access-r8vsw\") on node \"crc\" DevicePath \"\"" Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.656334 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0b1f897-ab76-4bf0-888c-748a3dc52c96-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "d0b1f897-ab76-4bf0-888c-748a3dc52c96" (UID: "d0b1f897-ab76-4bf0-888c-748a3dc52c96"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:19:18 crc kubenswrapper[4935]: I1201 20:19:18.702321 4935 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d0b1f897-ab76-4bf0-888c-748a3dc52c96-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 01 20:19:19 crc kubenswrapper[4935]: I1201 20:19:19.214665 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-rhfwr/must-gather-sxcst" Dec 01 20:19:20 crc kubenswrapper[4935]: I1201 20:19:20.523035 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" path="/var/lib/kubelet/pods/d0b1f897-ab76-4bf0-888c-748a3dc52c96/volumes" Dec 01 20:19:26 crc kubenswrapper[4935]: I1201 20:19:26.515635 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:19:26 crc kubenswrapper[4935]: E1201 20:19:26.516961 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:19:40 crc kubenswrapper[4935]: I1201 20:19:40.511017 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:19:40 crc kubenswrapper[4935]: E1201 20:19:40.514985 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:19:54 crc kubenswrapper[4935]: I1201 20:19:54.508695 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:19:54 crc kubenswrapper[4935]: E1201 20:19:54.509816 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:20:07 crc kubenswrapper[4935]: I1201 20:20:07.508178 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:20:07 crc kubenswrapper[4935]: E1201 20:20:07.509163 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:20:13 crc kubenswrapper[4935]: I1201 20:20:13.533530 4935 scope.go:117] "RemoveContainer" containerID="225c2b85640ad2766db30fa18036c1aa361ca974decd24b656cc64dfab2d3d13" Dec 01 20:20:13 crc kubenswrapper[4935]: I1201 20:20:13.556461 4935 scope.go:117] "RemoveContainer" containerID="bfdced1dd3c2098cde224aafb4b4579cf4bdfe5c6340b9b2019be146876dcfeb" Dec 01 20:20:13 crc kubenswrapper[4935]: I1201 20:20:13.606775 4935 scope.go:117] "RemoveContainer" containerID="cd41c4d1b385fc74b3b288297e4a06a7377e7a1237e7553e66de1094d771342c" 
Dec 01 20:20:22 crc kubenswrapper[4935]: I1201 20:20:22.509525 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:20:22 crc kubenswrapper[4935]: E1201 20:20:22.510759 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:20:36 crc kubenswrapper[4935]: I1201 20:20:36.539296 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:20:36 crc kubenswrapper[4935]: E1201 20:20:36.540982 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:20:48 crc kubenswrapper[4935]: I1201 20:20:48.509989 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:20:48 crc kubenswrapper[4935]: E1201 20:20:48.511303 4935 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zznnp_openshift-machine-config-operator(56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522)\"" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.414586 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j7l47"] Dec 01 20:20:56 crc kubenswrapper[4935]: E1201 20:20:56.416772 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51a17ff-3335-49cb-aef0-a39021470de1" containerName="extract-content" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.416800 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51a17ff-3335-49cb-aef0-a39021470de1" containerName="extract-content" Dec 01 20:20:56 crc kubenswrapper[4935]: E1201 20:20:56.416857 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51a17ff-3335-49cb-aef0-a39021470de1" containerName="extract-utilities" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.416871 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51a17ff-3335-49cb-aef0-a39021470de1" containerName="extract-utilities" Dec 01 20:20:56 crc kubenswrapper[4935]: E1201 20:20:56.416902 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" containerName="copy" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.416912 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" containerName="copy" Dec 01 20:20:56 crc kubenswrapper[4935]: E1201 20:20:56.416970 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51a17ff-3335-49cb-aef0-a39021470de1" 
containerName="registry-server" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.416979 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51a17ff-3335-49cb-aef0-a39021470de1" containerName="registry-server" Dec 01 20:20:56 crc kubenswrapper[4935]: E1201 20:20:56.417003 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" containerName="gather" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.417012 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" containerName="gather" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.417445 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" containerName="gather" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.417473 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0b1f897-ab76-4bf0-888c-748a3dc52c96" containerName="copy" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.417487 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="e51a17ff-3335-49cb-aef0-a39021470de1" containerName="registry-server" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.422778 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.443232 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j7l47"] Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.512537 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-catalog-content\") pod \"community-operators-j7l47\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.512687 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-utilities\") pod \"community-operators-j7l47\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.512763 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6twns\" (UniqueName: \"kubernetes.io/projected/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-kube-api-access-6twns\") pod \"community-operators-j7l47\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.616083 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-utilities\") pod \"community-operators-j7l47\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.616516 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6twns\" (UniqueName: \"kubernetes.io/projected/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-kube-api-access-6twns\") pod \"community-operators-j7l47\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " 
pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.616632 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-utilities\") pod \"community-operators-j7l47\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.617050 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-catalog-content\") pod \"community-operators-j7l47\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.620242 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-catalog-content\") pod \"community-operators-j7l47\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.648655 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6twns\" (UniqueName: \"kubernetes.io/projected/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-kube-api-access-6twns\") pod \"community-operators-j7l47\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:56 crc kubenswrapper[4935]: I1201 20:20:56.769886 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:20:57 crc kubenswrapper[4935]: I1201 20:20:57.308682 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j7l47"] Dec 01 20:20:57 crc kubenswrapper[4935]: I1201 20:20:57.383962 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7l47" event={"ID":"0d11590f-204a-436b-bc70-3ba1ffcdcb8a","Type":"ContainerStarted","Data":"66e0fa427dd02b45ed02eed820b5c49ce8760f1ef78c42ad8f872ca85f8967b9"} Dec 01 20:20:58 crc kubenswrapper[4935]: I1201 20:20:58.396844 4935 generic.go:334] "Generic (PLEG): container finished" podID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerID="9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b" exitCode=0 Dec 01 20:20:58 crc kubenswrapper[4935]: I1201 20:20:58.396929 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7l47" event={"ID":"0d11590f-204a-436b-bc70-3ba1ffcdcb8a","Type":"ContainerDied","Data":"9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b"} Dec 01 20:20:58 crc kubenswrapper[4935]: I1201 20:20:58.399851 4935 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:21:00 crc kubenswrapper[4935]: I1201 20:21:00.439791 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7l47" event={"ID":"0d11590f-204a-436b-bc70-3ba1ffcdcb8a","Type":"ContainerStarted","Data":"0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699"} Dec 01 20:21:01 crc kubenswrapper[4935]: I1201 20:21:01.453305 4935 generic.go:334] "Generic (PLEG): container finished" podID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" 
containerID="0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699" exitCode=0 Dec 01 20:21:01 crc kubenswrapper[4935]: I1201 20:21:01.453403 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7l47" event={"ID":"0d11590f-204a-436b-bc70-3ba1ffcdcb8a","Type":"ContainerDied","Data":"0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699"} Dec 01 20:21:02 crc kubenswrapper[4935]: I1201 20:21:02.467759 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7l47" event={"ID":"0d11590f-204a-436b-bc70-3ba1ffcdcb8a","Type":"ContainerStarted","Data":"b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e"} Dec 01 20:21:02 crc kubenswrapper[4935]: I1201 20:21:02.494301 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j7l47" podStartSLOduration=2.710121198 podStartE2EDuration="6.494280166s" podCreationTimestamp="2025-12-01 20:20:56 +0000 UTC" firstStartedPulling="2025-12-01 20:20:58.399587733 +0000 UTC m=+6672.421216992" lastFinishedPulling="2025-12-01 20:21:02.183746691 +0000 UTC m=+6676.205375960" observedRunningTime="2025-12-01 20:21:02.486044401 +0000 UTC m=+6676.507673650" watchObservedRunningTime="2025-12-01 20:21:02.494280166 +0000 UTC m=+6676.515909425" Dec 01 20:21:02 crc kubenswrapper[4935]: I1201 20:21:02.508164 4935 scope.go:117] "RemoveContainer" containerID="18fbeb14eccf52d0760801cba44eea3ba3f9464d7129669281038d5be8071e6b" Dec 01 20:21:03 crc kubenswrapper[4935]: I1201 20:21:03.486373 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" event={"ID":"56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522","Type":"ContainerStarted","Data":"ba7067171c145672f007a07bdfe2f12f8b41023603d35e805371d48cd57ffdfb"} Dec 01 20:21:06 crc kubenswrapper[4935]: I1201 20:21:06.770803 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:21:06 crc kubenswrapper[4935]: I1201 20:21:06.771283 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:21:06 crc kubenswrapper[4935]: I1201 20:21:06.822123 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:21:07 crc kubenswrapper[4935]: I1201 20:21:07.606469 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:21:07 crc kubenswrapper[4935]: I1201 20:21:07.692423 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j7l47"] Dec 01 20:21:09 crc kubenswrapper[4935]: I1201 20:21:09.567839 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j7l47" podUID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerName="registry-server" containerID="cri-o://b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e" gracePeriod=2 Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.103251 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.274704 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-utilities\") pod \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.274952 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6twns\" (UniqueName: \"kubernetes.io/projected/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-kube-api-access-6twns\") pod \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.275387 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-catalog-content\") pod \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\" (UID: \"0d11590f-204a-436b-bc70-3ba1ffcdcb8a\") " Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.275399 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-utilities" (OuterVolumeSpecName: "utilities") pod "0d11590f-204a-436b-bc70-3ba1ffcdcb8a" (UID: "0d11590f-204a-436b-bc70-3ba1ffcdcb8a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.277332 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.281509 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-kube-api-access-6twns" (OuterVolumeSpecName: "kube-api-access-6twns") pod "0d11590f-204a-436b-bc70-3ba1ffcdcb8a" (UID: "0d11590f-204a-436b-bc70-3ba1ffcdcb8a"). InnerVolumeSpecName "kube-api-access-6twns". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.324261 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d11590f-204a-436b-bc70-3ba1ffcdcb8a" (UID: "0d11590f-204a-436b-bc70-3ba1ffcdcb8a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.378768 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6twns\" (UniqueName: \"kubernetes.io/projected/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-kube-api-access-6twns\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.378801 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d11590f-204a-436b-bc70-3ba1ffcdcb8a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.588864 4935 generic.go:334] "Generic (PLEG): container finished" podID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerID="b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e" exitCode=0 Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.588916 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7l47" event={"ID":"0d11590f-204a-436b-bc70-3ba1ffcdcb8a","Type":"ContainerDied","Data":"b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e"} Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.588947 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7l47" event={"ID":"0d11590f-204a-436b-bc70-3ba1ffcdcb8a","Type":"ContainerDied","Data":"66e0fa427dd02b45ed02eed820b5c49ce8760f1ef78c42ad8f872ca85f8967b9"} Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.588970 4935 scope.go:117] "RemoveContainer" containerID="b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.589129 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j7l47" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.619865 4935 scope.go:117] "RemoveContainer" containerID="0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.630321 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j7l47"] Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.640171 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j7l47"] Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.640812 4935 scope.go:117] "RemoveContainer" containerID="9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.712390 4935 scope.go:117] "RemoveContainer" containerID="b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e" Dec 01 20:21:10 crc kubenswrapper[4935]: E1201 20:21:10.714315 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e\": container with ID starting with b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e not found: ID does not exist" containerID="b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.714373 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e"} err="failed to get container status \"b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e\": rpc error: code = NotFound desc = could not find container \"b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e\": container with ID starting with b6543f7a012e0c84b23eca3e5a67d2dea7ef7a5368f381e7f5934ea859d2af3e not found: ID does not exist" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.714408 4935 scope.go:117] "RemoveContainer" containerID="0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699" Dec 01 20:21:10 crc kubenswrapper[4935]: E1201 20:21:10.716872 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699\": container with ID starting with 0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699 not found: ID does not exist" containerID="0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.716915 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699"} err="failed to get container status \"0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699\": rpc error: code = NotFound desc = could not find container \"0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699\": container with ID starting with 0bae50764db1be924c9068caee237a8f746d03666e30375cbf48945f6acb4699 not found: ID does not exist" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.716935 4935 scope.go:117] "RemoveContainer" containerID="9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b" Dec 01 20:21:10 crc kubenswrapper[4935]: E1201 20:21:10.717309 4935 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b\": container with ID starting with 9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b not found: ID does not exist" containerID="9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b" Dec 01 20:21:10 crc kubenswrapper[4935]: I1201 20:21:10.717332 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b"} err="failed to get container status \"9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b\": rpc error: code = NotFound desc = could not find container \"9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b\": container with ID starting with 9e658b1dae0d67743205febb7b94d452b3b65ae1208f0cb1fbeffb7b83574c0b not found: ID does not exist" Dec 01 20:21:12 crc kubenswrapper[4935]: I1201 20:21:12.523902 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" path="/var/lib/kubelet/pods/0d11590f-204a-436b-bc70-3ba1ffcdcb8a/volumes" Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.848233 4935 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mvpz9"] Dec 01 20:23:00 crc kubenswrapper[4935]: E1201 20:23:00.849623 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerName="extract-content" Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.849644 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerName="extract-content" Dec 01 20:23:00 crc kubenswrapper[4935]: E1201 20:23:00.849660 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerName="registry-server" Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.849669 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerName="registry-server" Dec 01 20:23:00 crc kubenswrapper[4935]: E1201 20:23:00.849734 4935 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerName="extract-utilities" Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.849744 4935 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerName="extract-utilities" Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.850052 4935 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d11590f-204a-436b-bc70-3ba1ffcdcb8a" containerName="registry-server" Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.852330 4935 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.864890 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mvpz9"] Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.971762 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ls4lp\" (UniqueName: \"kubernetes.io/projected/8b6c0006-5da7-488c-bf47-13c39cfd9889-kube-api-access-ls4lp\") pod \"redhat-operators-mvpz9\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.971957 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-utilities\") pod \"redhat-operators-mvpz9\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:00 crc kubenswrapper[4935]: I1201 20:23:00.972125 4935 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-catalog-content\") pod \"redhat-operators-mvpz9\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:01 crc kubenswrapper[4935]: I1201 20:23:01.074044 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ls4lp\" (UniqueName: \"kubernetes.io/projected/8b6c0006-5da7-488c-bf47-13c39cfd9889-kube-api-access-ls4lp\") pod \"redhat-operators-mvpz9\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:01 crc kubenswrapper[4935]: I1201 20:23:01.074217 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-utilities\") pod \"redhat-operators-mvpz9\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:01 crc kubenswrapper[4935]: I1201 20:23:01.074258 4935 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-catalog-content\") pod \"redhat-operators-mvpz9\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:01 crc kubenswrapper[4935]: I1201 20:23:01.074785 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-utilities\") pod \"redhat-operators-mvpz9\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:01 crc kubenswrapper[4935]: I1201 20:23:01.074865 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-catalog-content\") pod \"redhat-operators-mvpz9\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:01 crc kubenswrapper[4935]: I1201 20:23:01.096768 4935 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ls4lp\" (UniqueName: \"kubernetes.io/projected/8b6c0006-5da7-488c-bf47-13c39cfd9889-kube-api-access-ls4lp\") pod \"redhat-operators-mvpz9\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:01 crc kubenswrapper[4935]: I1201 20:23:01.171769 4935 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:01 crc kubenswrapper[4935]: I1201 20:23:01.655865 4935 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mvpz9"] Dec 01 20:23:01 crc kubenswrapper[4935]: I1201 20:23:01.853309 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvpz9" event={"ID":"8b6c0006-5da7-488c-bf47-13c39cfd9889","Type":"ContainerStarted","Data":"469edbbd20662ae55bbe9163cb8e599517d7e36efbfd1f48dba2d3d71aacbaab"} Dec 01 20:23:02 crc kubenswrapper[4935]: I1201 20:23:02.873563 4935 generic.go:334] "Generic (PLEG): container finished" podID="8b6c0006-5da7-488c-bf47-13c39cfd9889" containerID="b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922" exitCode=0 Dec 01 20:23:02 crc kubenswrapper[4935]: I1201 20:23:02.873707 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvpz9" event={"ID":"8b6c0006-5da7-488c-bf47-13c39cfd9889","Type":"ContainerDied","Data":"b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922"} Dec 01 20:23:04 crc kubenswrapper[4935]: I1201 20:23:04.906725 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvpz9" event={"ID":"8b6c0006-5da7-488c-bf47-13c39cfd9889","Type":"ContainerStarted","Data":"c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66"} Dec 01 20:23:07 crc kubenswrapper[4935]: I1201 20:23:07.947172 4935 generic.go:334] "Generic (PLEG): container finished" podID="8b6c0006-5da7-488c-bf47-13c39cfd9889" containerID="c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66" exitCode=0 Dec 01 20:23:07 crc kubenswrapper[4935]: I1201 20:23:07.947260 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvpz9" event={"ID":"8b6c0006-5da7-488c-bf47-13c39cfd9889","Type":"ContainerDied","Data":"c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66"} Dec 01 20:23:09 crc kubenswrapper[4935]: I1201 20:23:09.971807 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvpz9" event={"ID":"8b6c0006-5da7-488c-bf47-13c39cfd9889","Type":"ContainerStarted","Data":"a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f"} Dec 01 20:23:09 crc kubenswrapper[4935]: I1201 20:23:09.999209 4935 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mvpz9" podStartSLOduration=3.901019163 podStartE2EDuration="9.999188424s" podCreationTimestamp="2025-12-01 20:23:00 +0000 UTC" firstStartedPulling="2025-12-01 20:23:02.879176222 +0000 UTC m=+6796.900805501" lastFinishedPulling="2025-12-01 20:23:08.977345493 +0000 UTC m=+6802.998974762" observedRunningTime="2025-12-01 20:23:09.993526589 +0000 UTC m=+6804.015155848" watchObservedRunningTime="2025-12-01 20:23:09.999188424 +0000 UTC m=+6804.020817683" Dec 01 20:23:11 crc kubenswrapper[4935]: I1201 20:23:11.172574 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 
20:23:11 crc kubenswrapper[4935]: I1201 20:23:11.172966 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:12 crc kubenswrapper[4935]: I1201 20:23:12.231616 4935 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mvpz9" podUID="8b6c0006-5da7-488c-bf47-13c39cfd9889" containerName="registry-server" probeResult="failure" output=< Dec 01 20:23:12 crc kubenswrapper[4935]: timeout: failed to connect service ":50051" within 1s Dec 01 20:23:12 crc kubenswrapper[4935]: > Dec 01 20:23:21 crc kubenswrapper[4935]: I1201 20:23:21.229215 4935 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:21 crc kubenswrapper[4935]: I1201 20:23:21.293406 4935 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:21 crc kubenswrapper[4935]: I1201 20:23:21.472670 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mvpz9"] Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.131906 4935 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mvpz9" podUID="8b6c0006-5da7-488c-bf47-13c39cfd9889" containerName="registry-server" containerID="cri-o://a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f" gracePeriod=2 Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.688964 4935 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.747936 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-catalog-content\") pod \"8b6c0006-5da7-488c-bf47-13c39cfd9889\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.748100 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ls4lp\" (UniqueName: \"kubernetes.io/projected/8b6c0006-5da7-488c-bf47-13c39cfd9889-kube-api-access-ls4lp\") pod \"8b6c0006-5da7-488c-bf47-13c39cfd9889\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.748285 4935 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-utilities\") pod \"8b6c0006-5da7-488c-bf47-13c39cfd9889\" (UID: \"8b6c0006-5da7-488c-bf47-13c39cfd9889\") " Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.749224 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-utilities" (OuterVolumeSpecName: "utilities") pod "8b6c0006-5da7-488c-bf47-13c39cfd9889" (UID: "8b6c0006-5da7-488c-bf47-13c39cfd9889"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.749868 4935 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.755300 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b6c0006-5da7-488c-bf47-13c39cfd9889-kube-api-access-ls4lp" (OuterVolumeSpecName: "kube-api-access-ls4lp") pod "8b6c0006-5da7-488c-bf47-13c39cfd9889" (UID: "8b6c0006-5da7-488c-bf47-13c39cfd9889"). InnerVolumeSpecName "kube-api-access-ls4lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.850960 4935 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ls4lp\" (UniqueName: \"kubernetes.io/projected/8b6c0006-5da7-488c-bf47-13c39cfd9889-kube-api-access-ls4lp\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.864363 4935 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b6c0006-5da7-488c-bf47-13c39cfd9889" (UID: "8b6c0006-5da7-488c-bf47-13c39cfd9889"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:23:23 crc kubenswrapper[4935]: I1201 20:23:23.952290 4935 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6c0006-5da7-488c-bf47-13c39cfd9889-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.150370 4935 generic.go:334] "Generic (PLEG): container finished" podID="8b6c0006-5da7-488c-bf47-13c39cfd9889" containerID="a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f" exitCode=0 Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.150435 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvpz9" event={"ID":"8b6c0006-5da7-488c-bf47-13c39cfd9889","Type":"ContainerDied","Data":"a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f"} Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.150464 4935 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mvpz9" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.150486 4935 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvpz9" event={"ID":"8b6c0006-5da7-488c-bf47-13c39cfd9889","Type":"ContainerDied","Data":"469edbbd20662ae55bbe9163cb8e599517d7e36efbfd1f48dba2d3d71aacbaab"} Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.150515 4935 scope.go:117] "RemoveContainer" containerID="a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.188999 4935 scope.go:117] "RemoveContainer" containerID="c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.213815 4935 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mvpz9"] Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.227483 4935 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mvpz9"] Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.237818 4935 scope.go:117] "RemoveContainer" containerID="b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.289065 4935 scope.go:117] "RemoveContainer" containerID="a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f" Dec 01 20:23:24 crc kubenswrapper[4935]: E1201 20:23:24.290464 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f\": container with ID starting with a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f not found: ID does not exist" containerID="a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.290518 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f"} err="failed to get container status \"a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f\": rpc error: code = NotFound desc = could not find container \"a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f\": container with ID starting with a8a0968bc22e727a63f5675443cc9dcca5dc803190b5e5cf9aad2738ee70419f not found: ID does not exist" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.290551 4935 scope.go:117] "RemoveContainer" containerID="c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66" Dec 01 20:23:24 crc kubenswrapper[4935]: E1201 20:23:24.291087 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66\": container with ID starting with c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66 not found: ID does not exist" containerID="c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.291123 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66"} err="failed to get container status \"c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66\": rpc error: code = NotFound desc = could not find container 
\"c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66\": container with ID starting with c598363afefb190e2244bd7a8305f04ecff028c3b10dbb24f8ecfccd85de7e66 not found: ID does not exist" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.291200 4935 scope.go:117] "RemoveContainer" containerID="b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922" Dec 01 20:23:24 crc kubenswrapper[4935]: E1201 20:23:24.291489 4935 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922\": container with ID starting with b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922 not found: ID does not exist" containerID="b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.291545 4935 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922"} err="failed to get container status \"b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922\": rpc error: code = NotFound desc = could not find container \"b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922\": container with ID starting with b68f0ed9151ff86e7656b864a57bfb194a744235b6cecf769f9cd84c64228922 not found: ID does not exist" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.345861 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.345940 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:23:24 crc kubenswrapper[4935]: I1201 20:23:24.526006 4935 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b6c0006-5da7-488c-bf47-13c39cfd9889" path="/var/lib/kubelet/pods/8b6c0006-5da7-488c-bf47-13c39cfd9889/volumes" Dec 01 20:23:54 crc kubenswrapper[4935]: I1201 20:23:54.346852 4935 patch_prober.go:28] interesting pod/machine-config-daemon-zznnp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:23:54 crc kubenswrapper[4935]: I1201 20:23:54.347385 4935 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zznnp" podUID="56ed42c4-6eca-40bb-8eb4-0f9c2b7d1522" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113374542024452 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113374543017370 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015113356672016516 5ustar 
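On the pod_startup_latency_tracker entries earlier in the log (20:21:02 for community-operators-j7l47 and 20:23:09 for redhat-operators-mvpz9), the reported podStartSLOduration appears to be the end-to-end startup duration minus the time spent pulling images; the monotonic m=+... offsets in those entries bear this out. A quick check with the numbers copied from the log:

# Consistency check of the startup-latency entries above: the SLO duration
# matches E2E duration minus image-pull time (lastFinishedPulling minus
# firstStartedPulling), using the m=+... monotonic offsets logged per pod.
def slo_duration(e2e_s, first_pull_m, last_pull_m):
    return e2e_s - (last_pull_m - first_pull_m)

# community-operators-j7l47: expect ~2.710121198 (as logged)
print(slo_duration(6.494280166, 6672.421216992, 6676.205375960))
# redhat-operators-mvpz9: expect ~3.901019163 (as logged)
print(slo_duration(9.999188424, 6796.900805501, 6802.998974762))
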